{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'PDF TO Markdown' && linkText !== 'PDF TO Markdown' ) { link.textContent = 'PDF TO Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== 'Voice Cloning' ) { link.textContent = 'Voice Cloning'; link.href = 'https://vibevoice.info/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || 
linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'PDF TO Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, 
code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { 
console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); ' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return main.list_restaurants()\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return data\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"new_contents":{"kind":"string","value":"import json\n\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import 
Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return json.dumps(main.list_restaurants())\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return json.dumps(data)\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"subject":{"kind":"string","value":"Return str instead of dict."},"message":{"kind":"string","value":"Return str instead of dict.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"talavis/kimenu"},"ndiff":{"kind":"string","value":"+ import json\n+ \n from flask import abort\n from flask import Flask\n from flask_caching import Cache\n \n import main\n \n \n app = Flask(__name__)\n cache = Cache(app, config={'CACHE_TYPE': 'simple'})\n \n \n @app.route('/')\n def display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n \n \n @app.route('/api/restaurants')\n @cache.cached(timeout=3600)\n def api_list_restaurants():\n- return main.list_restaurants()\n+ return json.dumps(main.list_restaurants())\n \n \n @app.route('/api/restaurant/')\n @cache.cached(timeout=3600)\n def api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n- return data\n+ return json.dumps(data)\n \n \n @app.route('/ki')\n @cache.cached(timeout=3600)\n def make_menu_ki():\n return main.gen_ki_menu()\n \n \n @app.route('/uu')\n @cache.cached(timeout=3600)\n def make_menu_uu():\n return main.gen_uu_menu()\n "},"instruction":{"kind":"string","value":"Return str instead of dict."},"content":{"kind":"string","value":"## Code Before:\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return main.list_restaurants()\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return data\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n\n## Instruction:\nReturn str instead of dict.\n## Code After:\nimport json\n\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return json.dumps(main.list_restaurants())\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return json.dumps(data)\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"diff":{"kind":"string","value":"+ import json\n+ \n from flask import abort\n from flask import Flask\n from flask_caching import Cache\n \n import main\n \n \n app = Flask(__name__)\n cache = Cache(app, config={'CACHE_TYPE': 'simple'})\n \n \n @app.route('/')\n def display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n \n \n @app.route('/api/restaurants')\n @cache.cached(timeout=3600)\n def api_list_restaurants():\n- return main.list_restaurants()\n+ return json.dumps(main.list_restaurants())\n? +++++++++++ +\n\n \n \n @app.route('/api/restaurant/')\n @cache.cached(timeout=3600)\n def api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n- return data\n+ return json.dumps(data)\n \n \n @app.route('/ki')\n @cache.cached(timeout=3600)\n def make_menu_ki():\n return main.gen_ki_menu()\n \n \n @app.route('/uu')\n @cache.cached(timeout=3600)\n def make_menu_uu():\n return main.gen_uu_menu()"}}},{"rowIdx":3492,"cells":{"commit":{"kind":"string","value":"e7805528be294374b128dd6e40e3f8990b03cdac"},"old_file":{"kind":"string","value":"main.py"},"new_file":{"kind":"string","value":"main.py"},"old_contents":{"kind":"string","value":"\nfrom importlib import import_module\n\nfrom bombdefusalmanual.ui.console import ConsoleUI\nfrom bombdefusalmanual.ui.models import Answer\n\n\nANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n]\n\n\ndef ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n\n\ndef import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n\n\nif __name__ == '__main__':\n ui = ConsoleUI()\n subject_name = ask_for_subject(ui)\n module = import_subject_module(subject_name)\n module.execute(ui)\n"},"new_contents":{"kind":"string","value":"\nfrom argparse import ArgumentParser\nfrom importlib import import_module\n\nfrom bombdefusalmanual.ui.console import ConsoleUI\nfrom bombdefusalmanual.ui.models import Answer\n\n\nANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 
'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n]\n\n\ndef parse_args():\n parser = ArgumentParser()\n\n parser.add_argument(\n '--gui',\n action='store_true',\n default=False,\n dest='use_gui',\n help='use graphical user interface')\n\n return parser.parse_args()\n\n\ndef get_ui(use_gui):\n if use_gui:\n from bombdefusalmanual.ui.tk import TkGUI\n return TkGUI()\n else:\n return ConsoleUI()\n\n\ndef ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n\n\ndef import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n\n\nif __name__ == '__main__':\n args = parse_args()\n ui = get_ui(args.use_gui)\n\n subject_name = ask_for_subject(ui)\n\n module = import_subject_module(subject_name)\n module.execute(ui)\n"},"subject":{"kind":"string","value":"Allow to enable graphical UI via command line option."},"message":{"kind":"string","value":"Allow to enable graphical UI via command line option.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"homeworkprod/better-bomb-defusal-manual,homeworkprod/better-bomb-defusal-manual"},"ndiff":{"kind":"string","value":" \n+ from argparse import ArgumentParser\n from importlib import import_module\n \n from bombdefusalmanual.ui.console import ConsoleUI\n from bombdefusalmanual.ui.models import Answer\n \n \n ANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n ]\n \n \n+ def parse_args():\n+ parser = ArgumentParser()\n+ \n+ parser.add_argument(\n+ '--gui',\n+ action='store_true',\n+ default=False,\n+ dest='use_gui',\n+ help='use graphical user interface')\n+ \n+ return parser.parse_args()\n+ \n+ \n+ def get_ui(use_gui):\n+ if use_gui:\n+ from bombdefusalmanual.ui.tk import TkGUI\n+ return TkGUI()\n+ else:\n+ return 
ConsoleUI()\n+ \n+ \n def ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n \n \n def import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n \n \n if __name__ == '__main__':\n- ui = ConsoleUI()\n+ args = parse_args()\n+ ui = get_ui(args.use_gui)\n+ \n subject_name = ask_for_subject(ui)\n+ \n module = import_subject_module(subject_name)\n module.execute(ui)\n "},"instruction":{"kind":"string","value":"Allow to enable graphical UI via command line option."},"content":{"kind":"string","value":"## Code Before:\n\nfrom importlib import import_module\n\nfrom bombdefusalmanual.ui.console import ConsoleUI\nfrom bombdefusalmanual.ui.models import Answer\n\n\nANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n]\n\n\ndef ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n\n\ndef import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n\n\nif __name__ == '__main__':\n ui = ConsoleUI()\n subject_name = ask_for_subject(ui)\n module = import_subject_module(subject_name)\n module.execute(ui)\n\n## Instruction:\nAllow to enable graphical UI via command line option.\n## Code After:\n\nfrom argparse import ArgumentParser\nfrom importlib import import_module\n\nfrom bombdefusalmanual.ui.console import ConsoleUI\nfrom bombdefusalmanual.ui.models import Answer\n\n\nANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n]\n\n\ndef parse_args():\n parser = ArgumentParser()\n\n parser.add_argument(\n '--gui',\n action='store_true',\n default=False,\n dest='use_gui',\n help='use graphical user interface')\n\n return 
parser.parse_args()\n\n\ndef get_ui(use_gui):\n if use_gui:\n from bombdefusalmanual.ui.tk import TkGUI\n return TkGUI()\n else:\n return ConsoleUI()\n\n\ndef ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n\n\ndef import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n\n\nif __name__ == '__main__':\n args = parse_args()\n ui = get_ui(args.use_gui)\n\n subject_name = ask_for_subject(ui)\n\n module = import_subject_module(subject_name)\n module.execute(ui)\n"},"diff":{"kind":"string","value":" \n+ from argparse import ArgumentParser\n from importlib import import_module\n \n from bombdefusalmanual.ui.console import ConsoleUI\n from bombdefusalmanual.ui.models import Answer\n \n \n ANSWERS = [\n Answer('thebutton', 'The Button'),\n Answer('complicatedwires', 'Complicated Wires'),\n Answer('morsecode', 'Morse Code'),\n Answer('passwords', 'Passwords'),\n Answer('whosonfirst', 'Who\\'s on First'),\n ]\n \n \n+ def parse_args():\n+ parser = ArgumentParser()\n+ \n+ parser.add_argument(\n+ '--gui',\n+ action='store_true',\n+ default=False,\n+ dest='use_gui',\n+ help='use graphical user interface')\n+ \n+ return parser.parse_args()\n+ \n+ \n+ def get_ui(use_gui):\n+ if use_gui:\n+ from bombdefusalmanual.ui.tk import TkGUI\n+ return TkGUI()\n+ else:\n+ return ConsoleUI()\n+ \n+ \n def ask_for_subject(ui):\n return ui.ask_for_choice('Which subject?', ANSWERS)\n \n \n def import_subject_module(name):\n return import_module('bombdefusalmanual.subjects.{}'.format(name))\n \n \n if __name__ == '__main__':\n- ui = ConsoleUI()\n+ args = parse_args()\n+ ui = get_ui(args.use_gui)\n+ \n subject_name = ask_for_subject(ui)\n+ \n module = import_subject_module(subject_name)\n 
module.execute(ui)"}}},{"rowIdx":3493,"cells":{"commit":{"kind":"string","value":"b011ccf5c4ce5f93c7b02f938385432325012569"},"old_file":{"kind":"string","value":"tt/core/tt.py"},"new_file":{"kind":"string","value":"tt/core/tt.py"},"old_contents":{"kind":"string","value":"from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\nfrom .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\nfrom .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n\n# main classes\nfrom .matrix import matrix\nfrom .vector import vector, tensor\n\n# utility\nfrom . import utils\n"},"new_contents":{"kind":"string","value":"from .matrix import matrix\nfrom .vector import vector, tensor\n\n\n# tools\nfrom .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\nfrom .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\nfrom .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n\n# utility\nfrom . import utils\n\n\n\n\n\n"},"subject":{"kind":"string","value":"Revert \"Import order changed to break tools dependency\""},"message":{"kind":"string","value":"Revert \"Import order changed to break tools dependency\"\n\nThis reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"uranix/ttpy,uranix/ttpy"},"ndiff":{"kind":"string","value":"+ from .matrix import matrix\n+ from .vector import vector, tensor\n+ \n+ \n+ # tools\n from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\n from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\n from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n \n- # main classes\n- from .matrix import matrix\n- from .vector import vector, tensor\n- \n # utility\n from . 
import utils\n \n+ \n+ \n+ \n+ \n+ "},"instruction":{"kind":"string","value":"Revert \"Import order changed to break tools dependency\""},"content":{"kind":"string","value":"## Code Before:\nfrom .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\nfrom .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\nfrom .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n\n# main classes\nfrom .matrix import matrix\nfrom .vector import vector, tensor\n\n# utility\nfrom . import utils\n\n## Instruction:\nRevert \"Import order changed to break tools dependency\"\n## Code After:\nfrom .matrix import matrix\nfrom .vector import vector, tensor\n\n\n# tools\nfrom .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\nfrom .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\nfrom .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n\n# utility\nfrom . import utils\n\n\n\n\n\n"},"diff":{"kind":"string","value":"+ from .matrix import matrix\n+ from .vector import vector, tensor\n+ \n+ \n+ # tools\n from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape\n from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS\n from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun\n \n- # main classes\n- from .matrix import matrix\n- from .vector import vector, tensor\n- \n # utility\n from . 
import utils\n+ \n+ \n+ \n+ \n+ "}}},{"rowIdx":3494,"cells":{"commit":{"kind":"string","value":"94c48d9f61b8f7e462ce5f7013b29ce2399e4190"},"old_file":{"kind":"string","value":"log4django/views/__init__.py"},"new_file":{"kind":"string","value":"log4django/views/__init__.py"},"old_contents":{"kind":"string","value":"from django.db.models import Q\n\nfrom ..models import LogRecord\n\n\ndef _filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return logrecord_qs"},"new_contents":{"kind":"string","value":"from django.db.models import Q\n\nfrom ..models import LogRecord\n\n\ndef _filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n | Q(request_id__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs 
= logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return logrecord_qs"},"subject":{"kind":"string","value":"Add search by request_id field."},"message":{"kind":"string","value":"Add search by request_id field.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"CodeScaleInc/log4django,CodeScaleInc/log4django,CodeScaleInc/log4django"},"ndiff":{"kind":"string","value":" from django.db.models import Q\n \n from ..models import LogRecord\n \n \n def _filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n+ | Q(request_id__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return logrecord_qs"},"instruction":{"kind":"string","value":"Add search by request_id field."},"content":{"kind":"string","value":"## Code Before:\nfrom django.db.models import Q\n\nfrom ..models import LogRecord\n\n\ndef 
_filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return logrecord_qs\n## Instruction:\nAdd search by request_id field.\n## Code After:\nfrom django.db.models import Q\n\nfrom ..models import LogRecord\n\n\ndef _filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n | Q(request_id__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return 
logrecord_qs"},"diff":{"kind":"string","value":" from django.db.models import Q\n \n from ..models import LogRecord\n \n \n def _filter_records(request):\n getvars = request.GET\n logrecord_qs = LogRecord.objects.all().select_related('app')\n # Filtering by get params.\n if getvars.get('q'):\n q = getvars.get('q')\n logrecord_qs = logrecord_qs.filter(\n Q(app__name__icontains=q)\n | Q(message__icontains=q)\n | Q(fileName__icontains=q)\n | Q(loggerName__icontains=q)\n | Q(exception_message__icontains=q)\n+ | Q(request_id__icontains=q)\n | Q(_extra__icontains=q)\n )\n if getvars.get('app'):\n logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app'))\n if getvars.get('logger'):\n logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger'))\n if getvars.getlist('level'):\n logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level'))\n if getvars.get('from'):\n logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from'))\n if getvars.get('to'):\n logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to'))\n return logrecord_qs"}}},{"rowIdx":3495,"cells":{"commit":{"kind":"string","value":"512ec31a3c022bc8a31d57bc51e4e6dac29dcf83"},"old_file":{"kind":"string","value":"src/sentry/web/frontend/organization_api_key_settings.py"},"new_file":{"kind":"string","value":"src/sentry/web/frontend/organization_api_key_settings.py"},"old_contents":{"kind":"string","value":"from __future__ import absolute_import\n\nfrom django import forms\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sentry.models import ApiKey, OrganizationMemberType\nfrom sentry.web.forms.fields import OriginsField\nfrom sentry.web.frontend.base import OrganizationView\n\n\nclass ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n\n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n\n\nclass 
OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n\n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n\n form = ApiKeyForm(request.POST or None, instance=key)\n\n context = {\n 'key': key,\n 'form': form,\n }\n\n return self.respond('sentry/organization-api-key-settings.html', context)\n"},"new_contents":{"kind":"string","value":"from __future__ import absolute_import\n\nfrom django import forms\nfrom django.contrib import messages\nfrom django.http import HttpResponseRedirect\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sentry.models import ApiKey, OrganizationMemberType\nfrom sentry.web.forms.fields import OriginsField\nfrom sentry.web.frontend.base import OrganizationView\n\n\nclass ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n\n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n\n\nclass OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n\n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n\n form = ApiKeyForm(request.POST or None, instance=key)\n if form.is_valid():\n key.save()\n messages.add_message(\n request, messages.SUCCESS,\n 'Your settings were saved.',\n )\n return HttpResponseRedirect(request.path)\n\n context = {\n 'key': key,\n 'form': form,\n }\n\n return self.respond('sentry/organization-api-key-settings.html', context)\n"},"subject":{"kind":"string","value":"Allow key settings to be saved"},"message":{"kind":"string","value":"Allow key settings to be 
saved\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"hongliang5623/sentry,TedaLIEz/sentry,gg7/sentry,ifduyue/sentry,wujuguang/sentry,boneyao/sentry,fuziontech/sentry,fuziontech/sentry,pauloschilling/sentry,mvaled/sentry,JackDanger/sentry,imankulov/sentry,kevinlondon/sentry,kevinlondon/sentry,vperron/sentry,looker/sentry,Natim/sentry,looker/sentry,beeftornado/sentry,Kryz/sentry,llonchj/sentry,ngonzalvez/sentry,1tush/sentry,1tush/sentry,looker/sentry,mvaled/sentry,zenefits/sentry,drcapulet/sentry,hongliang5623/sentry,llonchj/sentry,JackDanger/sentry,ifduyue/sentry,korealerts1/sentry,mitsuhiko/sentry,zenefits/sentry,argonemyth/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,drcapulet/sentry,argonemyth/sentry,ewdurbin/sentry,ifduyue/sentry,ngonzalvez/sentry,felixbuenemann/sentry,jean/sentry,ifduyue/sentry,JTCunning/sentry,kevinastone/sentry,JamesMura/sentry,gencer/sentry,JamesMura/sentry,alexm92/sentry,TedaLIEz/sentry,kevinlondon/sentry,nicholasserra/sentry,BuildingLink/sentry,argonemyth/sentry,daevaorn/sentry,mvaled/sentry,songyi199111/sentry,BayanGroup/sentry,korealerts1/sentry,wong2/sentry,fotinakis/sentry,JTCunning/sentry,1tush/sentry,llonchj/sentry,kevinastone/sentry,Kryz/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,fotinakis/sentry,vperron/sentry,fotinakis/sentry,felixbuenemann/sentry,imankulov/sentry,mvaled/sentry,wong2/sentry,wong2/sentry,boneyao/sentry,pauloschilling/sentry,beeftornado/sentry,alexm92/sentry,daevaorn/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,daevaorn/sentry,zenefits/sentry,wujuguang/sentry,fuziontech/sentry,mvaled/sentry,hongliang5623/sentry,fotinakis/sentry,drcapulet/sentry,looker/sentry,JTCunning/sentry,jean/sentry,BayanGroup/sentry,vperron/sentry,jean/sentry,JackDanger/sentry,BayanGroup/sentry,boneyao/sentry,gencer/sentry,alexm92/sentry,looker/sentry,ifduyue/sentry,ewdurbin/s
entry,daevaorn/sentry,gencer/sentry,JamesMura/sentry,wujuguang/sentry,imankulov/sentry,nicholasserra/sentry,Kryz/sentry,gg7/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,TedaLIEz/sentry,pauloschilling/sentry,nicholasserra/sentry,ewdurbin/sentry,songyi199111/sentry,mitsuhiko/sentry,felixbuenemann/sentry,gg7/sentry,songyi199111/sentry,korealerts1/sentry,kevinastone/sentry,zenefits/sentry"},"ndiff":{"kind":"string","value":" from __future__ import absolute_import\n \n from django import forms\n+ from django.contrib import messages\n+ from django.http import HttpResponseRedirect\n from django.utils.translation import ugettext_lazy as _\n \n from sentry.models import ApiKey, OrganizationMemberType\n from sentry.web.forms.fields import OriginsField\n from sentry.web.frontend.base import OrganizationView\n \n \n class ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n \n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n \n \n class OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n \n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n \n form = ApiKeyForm(request.POST or None, instance=key)\n+ if form.is_valid():\n+ key.save()\n+ messages.add_message(\n+ request, messages.SUCCESS,\n+ 'Your settings were saved.',\n+ )\n+ return HttpResponseRedirect(request.path)\n \n context = {\n 'key': key,\n 'form': form,\n }\n \n return self.respond('sentry/organization-api-key-settings.html', context)\n "},"instruction":{"kind":"string","value":"Allow key settings to be saved"},"content":{"kind":"string","value":"## Code Before:\nfrom __future__ import absolute_import\n\nfrom django import forms\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sentry.models import ApiKey, 
OrganizationMemberType\nfrom sentry.web.forms.fields import OriginsField\nfrom sentry.web.frontend.base import OrganizationView\n\n\nclass ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n\n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n\n\nclass OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n\n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n\n form = ApiKeyForm(request.POST or None, instance=key)\n\n context = {\n 'key': key,\n 'form': form,\n }\n\n return self.respond('sentry/organization-api-key-settings.html', context)\n\n## Instruction:\nAllow key settings to be saved\n## Code After:\nfrom __future__ import absolute_import\n\nfrom django import forms\nfrom django.contrib import messages\nfrom django.http import HttpResponseRedirect\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom sentry.models import ApiKey, OrganizationMemberType\nfrom sentry.web.forms.fields import OriginsField\nfrom sentry.web.frontend.base import OrganizationView\n\n\nclass ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n\n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n\n\nclass OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n\n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n\n form = ApiKeyForm(request.POST or None, instance=key)\n if form.is_valid():\n key.save()\n messages.add_message(\n request, messages.SUCCESS,\n 'Your settings were saved.',\n )\n return HttpResponseRedirect(request.path)\n\n context = {\n 'key': key,\n 'form': 
form,\n }\n\n return self.respond('sentry/organization-api-key-settings.html', context)\n"},"diff":{"kind":"string","value":" from __future__ import absolute_import\n \n from django import forms\n+ from django.contrib import messages\n+ from django.http import HttpResponseRedirect\n from django.utils.translation import ugettext_lazy as _\n \n from sentry.models import ApiKey, OrganizationMemberType\n from sentry.web.forms.fields import OriginsField\n from sentry.web.frontend.base import OrganizationView\n \n \n class ApiKeyForm(forms.ModelForm):\n allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,\n help_text=_('Separate multiple entries with a newline.'))\n \n class Meta:\n model = ApiKey\n fields = ('label', 'scopes', 'allowed_origins')\n \n \n class OrganizationApiKeySettingsView(OrganizationView):\n required_access = OrganizationMemberType.ADMIN\n \n def handle(self, request, organization, key_id):\n key = ApiKey.objects.get(organization=organization, id=key_id)\n \n form = ApiKeyForm(request.POST or None, instance=key)\n+ if form.is_valid():\n+ key.save()\n+ messages.add_message(\n+ request, messages.SUCCESS,\n+ 'Your settings were saved.',\n+ )\n+ return HttpResponseRedirect(request.path)\n \n context = {\n 'key': key,\n 'form': form,\n }\n \n return self.respond('sentry/organization-api-key-settings.html', context)"}}},{"rowIdx":3496,"cells":{"commit":{"kind":"string","value":"93e46310b8ea9e61dbabf02bd3dd4b6b6748dd6e"},"old_file":{"kind":"string","value":"erpnext/accounts/doctype/bank/bank_dashboard.py"},"new_file":{"kind":"string","value":"erpnext/accounts/doctype/bank/bank_dashboard.py"},"old_contents":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom frappe import _\n\n\ndef get_data():\n\treturn {\n\t\t'fieldname': 'bank',\n\t\t'non_standard_fieldnames': {\n\t\t\t'Paymnet Order': 'company_bank'\n\t\t},\n\t\t'transactions': [\n\t\t\t{\n\t\t\t\t'label': _('Bank Deatils'),\n\t\t\t\t'items': ['Bank Account', 
'Bank Guarantee']\n\t\t\t},\n\t\t\t{\n\t\t\t\t'items': ['Payment Order']\n\t\t\t}\n\t\t]\n\t}\n"},"new_contents":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom frappe import _\n\n\ndef get_data():\n\treturn {\n\t\t'fieldname': 'bank',\n\t\t'transactions': [\n\t\t\t{\n\t\t\t\t'label': _('Bank Deatils'),\n\t\t\t\t'items': ['Bank Account', 'Bank Guarantee']\n\t\t\t}\n\t\t]\n\t}\n"},"subject":{"kind":"string","value":"Remove payment order from bank dashboard"},"message":{"kind":"string","value":"fix: Remove payment order from bank dashboard\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext"},"ndiff":{"kind":"string","value":" from __future__ import unicode_literals\n \n from frappe import _\n \n \n def get_data():\n \treturn {\n \t\t'fieldname': 'bank',\n- \t\t'non_standard_fieldnames': {\n- \t\t\t'Paymnet Order': 'company_bank'\n- \t\t},\n \t\t'transactions': [\n \t\t\t{\n \t\t\t\t'label': _('Bank Deatils'),\n \t\t\t\t'items': ['Bank Account', 'Bank Guarantee']\n- \t\t\t},\n- \t\t\t{\n- \t\t\t\t'items': ['Payment Order']\n \t\t\t}\n \t\t]\n \t}\n "},"instruction":{"kind":"string","value":"Remove payment order from bank dashboard"},"content":{"kind":"string","value":"## Code Before:\nfrom __future__ import unicode_literals\n\nfrom frappe import _\n\n\ndef get_data():\n\treturn {\n\t\t'fieldname': 'bank',\n\t\t'non_standard_fieldnames': {\n\t\t\t'Paymnet Order': 'company_bank'\n\t\t},\n\t\t'transactions': [\n\t\t\t{\n\t\t\t\t'label': _('Bank Deatils'),\n\t\t\t\t'items': ['Bank Account', 'Bank Guarantee']\n\t\t\t},\n\t\t\t{\n\t\t\t\t'items': ['Payment Order']\n\t\t\t}\n\t\t]\n\t}\n\n## Instruction:\nRemove payment order from bank dashboard\n## Code After:\nfrom __future__ import unicode_literals\n\nfrom frappe import _\n\n\ndef get_data():\n\treturn {\n\t\t'fieldname': 'bank',\n\t\t'transactions': 
[\n\t\t\t{\n\t\t\t\t'label': _('Bank Deatils'),\n\t\t\t\t'items': ['Bank Account', 'Bank Guarantee']\n\t\t\t}\n\t\t]\n\t}\n"},"diff":{"kind":"string","value":" from __future__ import unicode_literals\n \n from frappe import _\n \n \n def get_data():\n \treturn {\n \t\t'fieldname': 'bank',\n- \t\t'non_standard_fieldnames': {\n- \t\t\t'Paymnet Order': 'company_bank'\n- \t\t},\n \t\t'transactions': [\n \t\t\t{\n \t\t\t\t'label': _('Bank Deatils'),\n \t\t\t\t'items': ['Bank Account', 'Bank Guarantee']\n- \t\t\t},\n- \t\t\t{\n- \t\t\t\t'items': ['Payment Order']\n \t\t\t}\n \t\t]\n \t}"}}},{"rowIdx":3497,"cells":{"commit":{"kind":"string","value":"ae583132ade7370595d6d9d14dba2b720c5415d6"},"old_file":{"kind":"string","value":"cinemair/favorites/serializers.py"},"new_file":{"kind":"string","value":"cinemair/favorites/serializers.py"},"old_contents":{"kind":"string","value":"from rest_framework import serializers as drf_serializers\n\nfrom cinemair.common.api import serializers\nfrom cinemair.shows.serializers import ShowRelatedSerializer\n\nfrom . import models\n\n\nclass FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n\n class Meta:\n model = models.Favorite\n\n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n"},"new_contents":{"kind":"string","value":"from rest_framework import serializers as drf_serializers\n\nfrom cinemair.common.api import serializers\nfrom cinemair.shows.serializers import ShowRelatedSerializer\n\nfrom . 
import models\n\n\nclass FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n\n class Meta:\n model = models.Favorite\n\n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n\n def validate_user(self, value):\n \"\"\"\n Check that the user is the same as request.user.\n \"\"\"\n if \"request\" in self.context:\n current_user = self.context[\"request\"].user\n\n if current_user != value:\n raise drf_serializers.ValidationError(\"User must be you.\")\n return value\n"},"subject":{"kind":"string","value":"Validate user when favorite a show"},"message":{"kind":"string","value":"Validate user when favorite a show\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Cinemair/cinemair-server,Cinemair/cinemair-server"},"ndiff":{"kind":"string","value":" from rest_framework import serializers as drf_serializers\n \n from cinemair.common.api import serializers\n from cinemair.shows.serializers import ShowRelatedSerializer\n \n from . 
import models\n \n \n class FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n \n class Meta:\n model = models.Favorite\n \n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n \n+ def validate_user(self, value):\n+ \"\"\"\n+ Check that the user is the same as request.user.\n+ \"\"\"\n+ if \"request\" in self.context:\n+ current_user = self.context[\"request\"].user\n+ \n+ if current_user != value:\n+ raise drf_serializers.ValidationError(\"User must be you.\")\n+ return value\n+ "},"instruction":{"kind":"string","value":"Validate user when favorite a show"},"content":{"kind":"string","value":"## Code Before:\nfrom rest_framework import serializers as drf_serializers\n\nfrom cinemair.common.api import serializers\nfrom cinemair.shows.serializers import ShowRelatedSerializer\n\nfrom . import models\n\n\nclass FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n\n class Meta:\n model = models.Favorite\n\n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n\n## Instruction:\nValidate user when favorite a show\n## Code After:\nfrom rest_framework import serializers as drf_serializers\n\nfrom cinemair.common.api import serializers\nfrom cinemair.shows.serializers import ShowRelatedSerializer\n\nfrom . 
import models\n\n\nclass FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n\n class Meta:\n model = models.Favorite\n\n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n\n def validate_user(self, value):\n \"\"\"\n Check that the user is the same as request.user.\n \"\"\"\n if \"request\" in self.context:\n current_user = self.context[\"request\"].user\n\n if current_user != value:\n raise drf_serializers.ValidationError(\"User must be you.\")\n return value\n"},"diff":{"kind":"string","value":" from rest_framework import serializers as drf_serializers\n \n from cinemair.common.api import serializers\n from cinemair.shows.serializers import ShowRelatedSerializer\n \n from . import models\n \n \n class FavoriteSerializer(serializers.ModelSerializer):\n show_info = drf_serializers.SerializerMethodField()\n \n class Meta:\n model = models.Favorite\n \n def get_show_info(self, obj):\n data = ShowRelatedSerializer(obj.show).data\n del data[\"id\"]\n return data\n+ \n+ def validate_user(self, value):\n+ \"\"\"\n+ Check that the user is the same as request.user.\n+ \"\"\"\n+ if \"request\" in self.context:\n+ current_user = self.context[\"request\"].user\n+ \n+ if current_user != value:\n+ raise drf_serializers.ValidationError(\"User must be you.\")\n+ return value"}}},{"rowIdx":3498,"cells":{"commit":{"kind":"string","value":"cfb0bda6096378de428a1460823626f3dc4c9059"},"old_file":{"kind":"string","value":"spyder_terminal/__init__.py"},"new_file":{"kind":"string","value":"spyder_terminal/__init__.py"},"old_contents":{"kind":"string","value":"\"\"\"Spyder Terminal Plugin.\"\"\"\n\nfrom .terminalplugin import TerminalPlugin as PLUGIN_CLASS\n\nPLUGIN_CLASS\n\nVERSION_INFO = (0, 2, 1)\n__version__ = '.'.join(map(str, VERSION_INFO))\n"},"new_contents":{"kind":"string","value":"\"\"\"Spyder Terminal Plugin.\"\"\"\n\nfrom .terminalplugin import TerminalPlugin as 
PLUGIN_CLASS\n\nPLUGIN_CLASS\n\nVERSION_INFO = (0, 3, 0, 'dev0')\n__version__ = '.'.join(map(str, VERSION_INFO))\n"},"subject":{"kind":"string","value":"Set package version info to 0.3.0.dev0"},"message":{"kind":"string","value":"Set package version info to 0.3.0.dev0\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal"},"ndiff":{"kind":"string","value":" \"\"\"Spyder Terminal Plugin.\"\"\"\n \n from .terminalplugin import TerminalPlugin as PLUGIN_CLASS\n \n PLUGIN_CLASS\n \n- VERSION_INFO = (0, 2, 1)\n+ VERSION_INFO = (0, 3, 0, 'dev0')\n __version__ = '.'.join(map(str, VERSION_INFO))\n "},"instruction":{"kind":"string","value":"Set package version info to 0.3.0.dev0"},"content":{"kind":"string","value":"## Code Before:\n\"\"\"Spyder Terminal Plugin.\"\"\"\n\nfrom .terminalplugin import TerminalPlugin as PLUGIN_CLASS\n\nPLUGIN_CLASS\n\nVERSION_INFO = (0, 2, 1)\n__version__ = '.'.join(map(str, VERSION_INFO))\n\n## Instruction:\nSet package version info to 0.3.0.dev0\n## Code After:\n\"\"\"Spyder Terminal Plugin.\"\"\"\n\nfrom .terminalplugin import TerminalPlugin as PLUGIN_CLASS\n\nPLUGIN_CLASS\n\nVERSION_INFO = (0, 3, 0, 'dev0')\n__version__ = '.'.join(map(str, VERSION_INFO))\n"},"diff":{"kind":"string","value":" \"\"\"Spyder Terminal Plugin.\"\"\"\n \n from .terminalplugin import TerminalPlugin as PLUGIN_CLASS\n \n PLUGIN_CLASS\n \n- VERSION_INFO = (0, 2, 1)\n? ^ ^\n\n+ VERSION_INFO = (0, 3, 0, 'dev0')\n? 
^ ^^^^^^^^^\n\n __version__ = '.'.join(map(str, VERSION_INFO))"}}},{"rowIdx":3499,"cells":{"commit":{"kind":"string","value":"3b79447e1027cc4965ab3272c34740b82d79c66c"},"old_file":{"kind":"string","value":"tools/perf/benchmarks/start_with_url.py"},"new_file":{"kind":"string","value":"tools/perf/benchmarks/start_with_url.py"},"old_contents":{"kind":"string","value":"\nfrom measurements import startup\nimport page_sets\nfrom telemetry import benchmark\n\n\n@benchmark.Disabled\nclass StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n\n"},"new_contents":{"kind":"string","value":"\nfrom measurements import startup\nimport page_sets\nfrom telemetry import benchmark\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n\n"},"subject":{"kind":"string","value":"Enable statup_with_url.cold benchmark on android."},"message":{"kind":"string","value":"Enable statup_with_url.cold benchmark on android.\n\nThe benchmark works locally, and collects an important 
datapoint for our\ncurrent optimization work.\n\nReview URL: https://codereview.chromium.org/508303004\n\nCr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#298526}\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Jonekee/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Chilledheart/chromiu
m,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-cros
swalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk"},"ndiff":{"kind":"string","value":" \n from measurements import startup\n import page_sets\n from telemetry import benchmark\n \n \n- @benchmark.Disabled\n+ @benchmark.Enabled('android', 'has tabs')\n class StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n \n \n @benchmark.Enabled('android', 'has tabs')\n class StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n \n "},"instruction":{"kind":"string","value":"Enable statup_with_url.cold benchmark on android."},"content":{"kind":"string","value":"## Code Before:\n\nfrom measurements 
import startup\nimport page_sets\nfrom telemetry import benchmark\n\n\n@benchmark.Disabled\nclass StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n\n\n## Instruction:\nEnable statup_with_url.cold benchmark on android.\n## Code After:\n\nfrom measurements import startup\nimport page_sets\nfrom telemetry import benchmark\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n\n\n@benchmark.Enabled('android', 'has tabs')\nclass StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n\n"},"diff":{"kind":"string","value":" \n from measurements import startup\n import page_sets\n from telemetry import benchmark\n \n \n- @benchmark.Disabled\n+ @benchmark.Enabled('android', 'has tabs')\n class StartWithUrlCold(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome cold with startup URLs\"\"\"\n tag = 'cold'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'cold': True,\n 'pageset_repeat': 5}\n \n \n @benchmark.Enabled('android', 'has tabs')\n class StartWithUrlWarm(benchmark.Benchmark):\n \"\"\"Measure time to start Chrome 
warm with startup URLs\"\"\"\n tag = 'warm'\n test = startup.StartWithUrl\n page_set = page_sets.StartupPagesPageSet\n options = {'warm': True,\n 'pageset_repeat': 10}\n "}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":34,"numItemsPerPage":100,"numTotalItems":20341,"offset":3400,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1ODAyMTg2Miwic3ViIjoiL2RhdGFzZXRzL2tzZW5pYXN5Y2gvQ29kZS1FZGl0cy1FZGl0UGFja0ZUIiwiZXhwIjoxNzU4MDI1NDYyLCJpc3MiOiJodHRwczovL2h1Z2dpbmdmYWNlLmNvIn0.ioVfGJ7RWfw9UBkLM4GeH6NW-RQ2yNnEBuU25JUBrb-7SAoGpIvW1XfiBazRaPdDhLQzoU3XiYWUC_hm4hm7Bg","displayUrls":true},"discussionsStats":{"closed":0,"open":2,"total":2},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
commit
stringlengths
40
40
old_file
stringlengths
4
106
new_file
stringlengths
4
106
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
2.95k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
7
43k
ndiff
stringlengths
52
3.31k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
diff
stringlengths
49
3.61k
e54d753a3fb58032936cbf5e137bb5ef67e2813c
task_15.py
task_15.py
"""Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42
"""Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 THANKS_FOR_THE_FISH = str(NOT_THE_QUESTION) + str(ANSWER)
Change the string to concatenate it by using str() and then make new variable equal the first str() to the second str()
Change the string to concatenate it by using str() and then make new variable equal the first str() to the second str()
Python
mpl-2.0
gracehyemin/is210-week-03-warmup,gracehyemin/is210-week-03-warmup
"""Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 + THANKS_FOR_THE_FISH = str(NOT_THE_QUESTION) + str(ANSWER)
Change the string to concatenate it by using str() and then make new variable equal the first str() to the second str()
## Code Before: """Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 ## Instruction: Change the string to concatenate it by using str() and then make new variable equal the first str() to the second str() ## Code After: """Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 THANKS_FOR_THE_FISH = str(NOT_THE_QUESTION) + str(ANSWER)
"""Provides variables for string and integer conversion.""" NOT_THE_QUESTION = 'The answer to life, the universe, and everything? It\'s ' ANSWER = 42 + THANKS_FOR_THE_FISH = str(NOT_THE_QUESTION) + str(ANSWER)
db6b9761d51d45b2708ba6bca997196fc73fbe94
sheldon/__init__.py
sheldon/__init__.py
# Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]'
# Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * # Adapter module contains classes and tools # for plugins sending messages from sheldon.adapter import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]'
Add adapter module to init file
Add adapter module to init file
Python
mit
lises/sheldon
# Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * + + # Adapter module contains classes and tools + # for plugins sending messages + from sheldon.adapter import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]'
Add adapter module to init file
## Code Before: # Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]' ## Instruction: Add adapter module to init file ## Code After: # Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * # Adapter module contains classes and tools # for plugins sending messages from sheldon.adapter import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]'
# Bot module contains bot's main class - Sheldon from sheldon.bot import * # Hooks module contains hooks for plugins from sheldon.hooks import * + + # Adapter module contains classes and tools + # for plugins sending messages + from sheldon.adapter import * # Utils folder contains scripts for more # comfortable work with sending and parsing # messages. For example, script for downloading # files by url. from sheldon.utils import * __author__ = 'Seva Zhidkov' __version__ = '0.0.1#dev' __email__ = '[email protected]'
caaf8e40c99f410b09580b81c4fa7a094395319c
test/test_progress_bar.py
test/test_progress_bar.py
import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%"
import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%" assert len(bar) == 100 + 8
Test width of progress bar
Test width of progress bar
Python
mit
audy/downloads
import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%" + assert len(bar) == 100 + 8
Test width of progress bar
## Code Before: import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%" ## Instruction: Test width of progress bar ## Code After: import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%" assert len(bar) == 100 + 8
import pytest from downloads.download import _progress_bar @pytest.mark.parametrize( "current,block_size,total_size", [ ( 100, 32, 100 * 32, ), ( 75, 32, 100 * 32, ), ( 50, 32, 100 * 32, ), ( 25, 32, 100 * 32, ), ( 0, 32, 100 * 32, ), ], ) def test_progress_bar(current, block_size, total_size): bar = _progress_bar( current=current, block_size=block_size, total_size=total_size ) assert bar.count("#") == current assert bar.split()[-1] == f"{current:.1f}%" + assert len(bar) == 100 + 8
bb42fe14165806caf8a2386c49cb602dbf9ad391
connectionless_service.py
connectionless_service.py
from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: print('Sending a custom ACK to the client %s \ '.format(address)) writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888)
from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: print('Sending a custom ACK to the client %s' % (address.__str__())) writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888)
Fix python 2.4 connectionless service
Fix python 2.4 connectionless service
Python
mit
facundovictor/non-blocking-socket-samples
from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: - print('Sending a custom ACK to the client %s \ + print('Sending a custom ACK to the client %s' - '.format(address)) + % (address.__str__())) writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888)
Fix python 2.4 connectionless service
## Code Before: from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: print('Sending a custom ACK to the client %s \ '.format(address)) writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888) ## Instruction: Fix python 2.4 connectionless service ## Code After: from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: print('Sending a custom ACK to the client %s' % (address.__str__())) writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888)
from simple.server import SimpleServer __author__ = "Facundo Victor" __license__ = "MIT" __email__ = "[email protected]" def handle_message(sockets=None): """ Handle a simple UDP client. """ if sockets is not None: (readable, writable, errors) = sockets try: while True: (data, address) = readable.recvfrom(1024) print('Received data: %s from %s' % (data, address)) if data: - print('Sending a custom ACK to the client %s \ ? ^^ + print('Sending a custom ACK to the client %s' ? ^ - '.format(address)) ? ^^^^^^^^ + % (address.__str__())) ? ^^ +++++++++ + writable.sendto("Received ;)\n", address) else: print('Received empty data') break finally: SS.close_connection() SS = SimpleServer(connection_oriented=False) SS.register_handler(handle_message) SS.bind_and_listeen("localhost", 8888)
59761e83b240fe7573370f542ea6e877c5850907
setup.py
setup.py
from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', 'scripts/sc-xbox.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ])
from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', 'scripts/sc-xbox.py', 'scripts/vdf2json.py', 'scripts/json2vdf.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ])
Add json to from vdf scripts
Add json to from vdf scripts Signed-off-by: Stany MARCEL <[email protected]>
Python
mit
ynsta/steamcontroller,oneru/steamcontroller,oneru/steamcontroller,ynsta/steamcontroller
from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', - 'scripts/sc-xbox.py'], + 'scripts/sc-xbox.py', + 'scripts/vdf2json.py', + 'scripts/json2vdf.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ])
Add json to from vdf scripts
## Code Before: from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', 'scripts/sc-xbox.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ]) ## Instruction: Add json to from vdf scripts ## Code After: from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', 'scripts/sc-xbox.py', 'scripts/vdf2json.py', 'scripts/json2vdf.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ])
from distutils.core import setup, Extension uinput = Extension('libuinput', sources = ['src/uinput.c']) setup(name='python-steamcontroller', version='1.0', description='Steam Controller userland driver', author='Stany MARCEL', author_email='[email protected]', url='https://github.com/ynsta/steamcontroller', package_dir={'steamcontroller': 'src'}, packages=['steamcontroller'], scripts=['scripts/sc-dump.py', - 'scripts/sc-xbox.py'], ? - + 'scripts/sc-xbox.py', + 'scripts/vdf2json.py', + 'scripts/json2vdf.py'], license='MIT', platforms=['Linux'], ext_modules=[uinput, ])
d4dd408e671d14518b3fabb964027cd006366fca
testfixtures/compat.py
testfixtures/compat.py
import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str BytesLiteral = lambda x: x.encode('latin1') UnicodeLiteral = lambda x: x class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring BytesLiteral = lambda x: x UnicodeLiteral = lambda x: x.decode('latin1') class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
Add Python version agnostic helpers for creating byte and unicode literals.
Add Python version agnostic helpers for creating byte and unicode literals.
Python
mit
Simplistix/testfixtures,nebulans/testfixtures
import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str + BytesLiteral = lambda x: x.encode('latin1') + UnicodeLiteral = lambda x: x class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring + BytesLiteral = lambda x: x + UnicodeLiteral = lambda x: x.decode('latin1') class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
Add Python version agnostic helpers for creating byte and unicode literals.
## Code Before: import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall() ## Instruction: Add Python version agnostic helpers for creating byte and unicode literals. ## Code After: import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str BytesLiteral = lambda x: x.encode('latin1') UnicodeLiteral = lambda x: x class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring BytesLiteral = lambda x: x UnicodeLiteral = lambda x: x.decode('latin1') class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
import sys if sys.version_info[:2] > (3, 0): PY2 = False PY3 = True Bytes = bytes Unicode = str basestring = str + BytesLiteral = lambda x: x.encode('latin1') + UnicodeLiteral = lambda x: x class_type_name = 'class' ClassType = type exception_module = 'builtins' new_class = type self_name = '__self__' from io import StringIO xrange = range else: PY2 = True PY3 = False Bytes = str Unicode = unicode basestring = basestring + BytesLiteral = lambda x: x + UnicodeLiteral = lambda x: x.decode('latin1') class_type_name = 'type' from types import ClassType exception_module = 'exceptions' from new import classobj as new_class self_name = 'im_self' from cStringIO import StringIO xrange = xrange try: from mock import call as mock_call except ImportError: # pragma: no cover class MockCall: pass mock_call = MockCall() try: from unittest.mock import call as unittest_mock_call except ImportError: class UnittestMockCall: pass unittest_mock_call = UnittestMockCall()
0004bde0d40dfea167d76a83c20acfffc0abfa28
poyo/__init__.py
poyo/__init__.py
from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' __all__ = ['parse_string', 'PoyoException']
import logging from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = ['parse_string', 'PoyoException']
Add NullHandler to poyo root logger
Add NullHandler to poyo root logger
Python
mit
hackebrot/poyo
+ + import logging from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' + logging.getLogger(__name__).addHandler(logging.NullHandler()) + __all__ = ['parse_string', 'PoyoException']
Add NullHandler to poyo root logger
## Code Before: from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' __all__ = ['parse_string', 'PoyoException'] ## Instruction: Add NullHandler to poyo root logger ## Code After: import logging from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' logging.getLogger(__name__).addHandler(logging.NullHandler()) __all__ = ['parse_string', 'PoyoException']
+ + import logging from .exceptions import PoyoException from .parser import parse_string __author__ = 'Raphael Pierzina' __email__ = '[email protected]' __version__ = '0.3.0' + logging.getLogger(__name__).addHandler(logging.NullHandler()) + __all__ = ['parse_string', 'PoyoException']
121bcbfc873ce45667ec67bc6f22387b43f3aa52
openfisca_web_ui/uuidhelpers.py
openfisca_web_ui/uuidhelpers.py
"""Helpers to handle uuid""" import uuid def generate_uuid(): return unicode(uuid.uuid4()).replace('-', '')
"""Helpers to handle uuid""" import uuid def generate_uuid(): return unicode(uuid.uuid4().hex)
Use uuid.hex instead of reinventing it.
Use uuid.hex instead of reinventing it.
Python
agpl-3.0
openfisca/openfisca-web-ui,openfisca/openfisca-web-ui,openfisca/openfisca-web-ui
"""Helpers to handle uuid""" import uuid def generate_uuid(): - return unicode(uuid.uuid4()).replace('-', '') + return unicode(uuid.uuid4().hex)
Use uuid.hex instead of reinventing it.
## Code Before: """Helpers to handle uuid""" import uuid def generate_uuid(): return unicode(uuid.uuid4()).replace('-', '') ## Instruction: Use uuid.hex instead of reinventing it. ## Code After: """Helpers to handle uuid""" import uuid def generate_uuid(): return unicode(uuid.uuid4().hex)
"""Helpers to handle uuid""" import uuid def generate_uuid(): - return unicode(uuid.uuid4()).replace('-', '') ? ----------------- + return unicode(uuid.uuid4().hex) ? ++++
f62278c420429cfe9a3f2a8903f902ae24bdd95d
remoteappmanager/handlers/home_handler.py
remoteappmanager/handlers/home_handler.py
from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): images_info = yield self._get_images_info() self.render('home.html', images_info=images_info) # private @gen.coroutine def _get_images_info(self): """Retrieves a dictionary containing the image and the associated container, if active, as values.""" container_manager = self.application.container_manager apps = self.application.db.get_apps_for_user( self.current_user.account) images_info = [] for mapping_id, app, policy in apps: image = yield container_manager.image(app.image) if image is None: # The user has access to an application that is no longer # available in docker. We just move on. continue containers = yield container_manager.containers_from_mapping_id( self.current_user.name, mapping_id) # We assume that we can only run one container only (although the # API considers a broader possibility for future extension. container = None if len(containers): container = containers[0] images_info.append({ "image": image, "mapping_id": mapping_id, "container": container }) return images_info
from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): self.render('home.html')
Remove dead code now part of the REST API.
Remove dead code now part of the REST API.
Python
bsd-3-clause
simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote
from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): + self.render('home.html') - images_info = yield self._get_images_info() - self.render('home.html', images_info=images_info) - # private - - @gen.coroutine - def _get_images_info(self): - """Retrieves a dictionary containing the image and the associated - container, if active, as values.""" - container_manager = self.application.container_manager - - apps = self.application.db.get_apps_for_user( - self.current_user.account) - - images_info = [] - - for mapping_id, app, policy in apps: - image = yield container_manager.image(app.image) - - if image is None: - # The user has access to an application that is no longer - # available in docker. We just move on. - continue - - containers = yield container_manager.containers_from_mapping_id( - self.current_user.name, - mapping_id) - - # We assume that we can only run one container only (although the - # API considers a broader possibility for future extension. - container = None - if len(containers): - container = containers[0] - - images_info.append({ - "image": image, - "mapping_id": mapping_id, - "container": container - }) - return images_info -
Remove dead code now part of the REST API.
## Code Before: from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): images_info = yield self._get_images_info() self.render('home.html', images_info=images_info) # private @gen.coroutine def _get_images_info(self): """Retrieves a dictionary containing the image and the associated container, if active, as values.""" container_manager = self.application.container_manager apps = self.application.db.get_apps_for_user( self.current_user.account) images_info = [] for mapping_id, app, policy in apps: image = yield container_manager.image(app.image) if image is None: # The user has access to an application that is no longer # available in docker. We just move on. continue containers = yield container_manager.containers_from_mapping_id( self.current_user.name, mapping_id) # We assume that we can only run one container only (although the # API considers a broader possibility for future extension. container = None if len(containers): container = containers[0] images_info.append({ "image": image, "mapping_id": mapping_id, "container": container }) return images_info ## Instruction: Remove dead code now part of the REST API. ## Code After: from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): self.render('home.html')
from tornado import gen, web from remoteappmanager.handlers.base_handler import BaseHandler class HomeHandler(BaseHandler): """Render the user's home page""" @web.authenticated @gen.coroutine def get(self): + self.render('home.html') - images_info = yield self._get_images_info() - self.render('home.html', images_info=images_info) - - # private - - @gen.coroutine - def _get_images_info(self): - """Retrieves a dictionary containing the image and the associated - container, if active, as values.""" - container_manager = self.application.container_manager - - apps = self.application.db.get_apps_for_user( - self.current_user.account) - - images_info = [] - - for mapping_id, app, policy in apps: - image = yield container_manager.image(app.image) - - if image is None: - # The user has access to an application that is no longer - # available in docker. We just move on. - continue - - containers = yield container_manager.containers_from_mapping_id( - self.current_user.name, - mapping_id) - - # We assume that we can only run one container only (although the - # API considers a broader possibility for future extension. - container = None - if len(containers): - container = containers[0] - - images_info.append({ - "image": image, - "mapping_id": mapping_id, - "container": container - }) - return images_info
b82d85114c13f945cc1976606d4d36d5b4b2885a
phonenumber_field/formfields.py
phonenumber_field/formfields.py
from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): if value in self.empty_values: return '' phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
Python
mit
bramd/django-phonenumber-field,bramd/django-phonenumber-field
from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): + if value in self.empty_values: + return '' phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
## Code Before: from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number ## Instruction: Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True. ## Code After: from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): if value in self.empty_values: return '' phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
from django.utils.translation import ugettext_lazy as _ from django.forms.fields import CharField from django.core.exceptions import ValidationError from phonenumber_field.validators import validate_international_phonenumber from phonenumber_field.phonenumber import to_python class PhoneNumberField(CharField): default_error_messages = { 'invalid': _(u'Enter a valid phone number.'), } default_validators = [validate_international_phonenumber] def to_python(self, value): + if value in self.empty_values: + return '' phone_number = to_python(value) if phone_number and not phone_number.is_valid(): raise ValidationError(self.error_messages['invalid']) return phone_number
873c5e8bf85a8be5a08852134967d29353ed3009
examples/simple.py
examples/simple.py
from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", "root://ndcms.crc.nd.edu//store/user/matze/test_shuffle_take29", "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) )
from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", "root://T3_US_NotreDame/store/user/matze/test_shuffle_take29", "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) )
Swap ndcms for generic T3 string.
Swap ndcms for generic T3 string.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", - "root://ndcms.crc.nd.edu//store/user/matze/test_shuffle_take29", + "root://T3_US_NotreDame/store/user/matze/test_shuffle_take29", "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) )
Swap ndcms for generic T3 string.
## Code Before: from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", "root://ndcms.crc.nd.edu//store/user/matze/test_shuffle_take29", "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) ) ## Instruction: Swap ndcms for generic T3 string. ## Code After: from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", "root://T3_US_NotreDame/store/user/matze/test_shuffle_take29", "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) )
from lobster import cmssw from lobster.core import * storage = StorageConfiguration( output=[ "hdfs:///store/user/matze/test_shuffle_take29", "file:///hadoop/store/user/matze/test_shuffle_take29", - "root://ndcms.crc.nd.edu//store/user/matze/test_shuffle_take29", ? ^^^ --------- --- + "root://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ? ^^^^^^^^^^^^^ "srm://T3_US_NotreDame/store/user/matze/test_shuffle_take29", ] ) processing = Category( name='processing', cores=1, runtime=900, memory=1000 ) workflows = [] single_mu = Workflow( label='single_mu', dataset=cmssw.Dataset( dataset='/SingleMu/Run2012A-recover-06Aug2012-v1/AOD', events_per_task=5000 ), category=processing, pset='slim.py', publish_label='test', merge_size='3.5G', outputs=['output.root'] ) workflows.append(single_mu) config = Config( label='shuffle', workdir='/tmpscratch/users/matze/test_shuffle_take30', plotdir='/afs/crc.nd.edu/user/m/mwolf3/www/lobster/test_shuffle_take29', storage=storage, workflows=workflows, advanced=AdvancedOptions(log_level=1) )
6b6181f1c2f902f20da440eb3bedb5d02ecfbf16
angr/engines/soot/expressions/cast.py
angr/engines/soot/expressions/cast.py
from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value value_size = ArchSoot.primitive_types[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted
from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value value_size = ArchSoot.sizeof[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted
Use correct dict for the type sizes
Use correct dict for the type sizes
Python
bsd-2-clause
iamahuman/angr,iamahuman/angr,iamahuman/angr,angr/angr,schieb/angr,schieb/angr,angr/angr,schieb/angr,angr/angr
from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value - value_size = ArchSoot.primitive_types[self.expr.cast_type] + value_size = ArchSoot.sizeof[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted
Use correct dict for the type sizes
## Code Before: from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value value_size = ArchSoot.primitive_types[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted ## Instruction: Use correct dict for the type sizes ## Code After: from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value value_size = ArchSoot.sizeof[self.expr.cast_type] value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV 
value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted
from .base import SimSootExpr from archinfo import ArchSoot import logging l = logging.getLogger("angr.engines.soot.expressions.cast") class SimSootExpr_Cast(SimSootExpr): def __init__(self, expr, state): super(SimSootExpr_Cast, self).__init__(expr, state) def _execute(self): if self.expr.cast_type in ['double', 'float']: l.error('Casting of double and float types not supported.') return # get value local = self._translate_value(self.expr.value) value_uncasted = self.state.memory.load(local) # lookup the type size and extract value - value_size = ArchSoot.primitive_types[self.expr.cast_type] ? ^^ ^^^^^ ^^^^^^ + value_size = ArchSoot.sizeof[self.expr.cast_type] ? ^ ^ ^^ value_extracted = value_uncasted.reversed.get_bytes(index=0, size=value_size/8).reversed # determine size of Soot bitvector and resize bitvector # Note: smaller types than int's are stored in a 32-bit BV value_soot_size = value_size if value_size >= 32 else 32 if self.expr.cast_type in ['char', 'boolean']: # unsigned extend value_casted = value_extracted.zero_extend(value_soot_size-value_size) else: # signed extend value_casted = value_extracted.sign_extend(value_soot_size-value_size) self.expr = value_casted
bcc3a4e4c8b3117deea4c7621653f65b588537f9
keystone/common/policies/token_revocation.py
keystone/common/policies/token_revocation.py
from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies
from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, # NOTE(lbragstad): Documenting scope_types here doesn't really make a # difference since this API is going to return an empty list regardless # of the token scope used in the API call. More-or-less just doing this # for consistency with other policies. scope_types=['system', 'project'], description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies
Add scope_types to token revocation policies
Add scope_types to token revocation policies This doesn't seem useful since the API will return an empty list regardless because PKI support has been removed. More or less doing this for consistency. Change-Id: Iaa2925119fa6c9e2324546ed44aa54bac51dba05
Python
apache-2.0
mahak/keystone,openstack/keystone,openstack/keystone,mahak/keystone,openstack/keystone,mahak/keystone
from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, + # NOTE(lbragstad): Documenting scope_types here doesn't really make a + # difference since this API is going to return an empty list regardless + # of the token scope used in the API call. More-or-less just doing this + # for consistency with other policies. + scope_types=['system', 'project'], description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies
Add scope_types to token revocation policies
## Code Before: from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies ## Instruction: Add scope_types to token revocation policies ## Code After: from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, # NOTE(lbragstad): Documenting scope_types here doesn't really make a # difference since this API is going to return an empty list regardless # of the token scope used in the API call. More-or-less just doing this # for consistency with other policies. scope_types=['system', 'project'], description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies
from oslo_policy import policy from keystone.common.policies import base token_revocation_policies = [ policy.DocumentedRuleDefault( name=base.IDENTITY % 'revocation_list', check_str=base.RULE_SERVICE_OR_ADMIN, + # NOTE(lbragstad): Documenting scope_types here doesn't really make a + # difference since this API is going to return an empty list regardless + # of the token scope used in the API call. More-or-less just doing this + # for consistency with other policies. + scope_types=['system', 'project'], description='List revoked PKI tokens.', operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked', 'method': 'GET'}]) ] def list_rules(): return token_revocation_policies
503f92796b9368a78f39c41fb6bb596f32728b8d
herana/views.py
herana/views.py
import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] if request.user.is_proj_leader or request.user.is_institute_admin: data['user_institute'] = request.user.get_user_institute().as_dict() context = { "data": json.dumps(data), } return render( request, self.template_name, context=context)
import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] if request.user.is_authenticated(): if request.user.is_proj_leader or request.user.is_institute_admin: data['user_institute'] = request.user.get_user_institute().as_dict() context = { "data": json.dumps(data), } return render( request, self.template_name, context=context)
Check if user in logged in
Check if user in logged in
Python
mit
Code4SA/herana,Code4SA/herana,Code4SA/herana,Code4SA/herana
import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] + if request.user.is_authenticated(): - if request.user.is_proj_leader or request.user.is_institute_admin: + if request.user.is_proj_leader or request.user.is_institute_admin: - data['user_institute'] = request.user.get_user_institute().as_dict() + data['user_institute'] = request.user.get_user_institute().as_dict() context = { "data": json.dumps(data), } return render( request, self.template_name, context=context)
Check if user in logged in
## Code Before: import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] if request.user.is_proj_leader or request.user.is_institute_admin: data['user_institute'] = request.user.get_user_institute().as_dict() context = { "data": json.dumps(data), } return render( request, self.template_name, context=context) ## Instruction: Check if user in logged in ## Code After: import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] if request.user.is_authenticated(): if request.user.is_proj_leader or request.user.is_institute_admin: data['user_institute'] = request.user.get_user_institute().as_dict() context = { "data": json.dumps(data), } return render( request, self.template_name, context=context)
import json from django.shortcuts import render from django.views.generic import View from models import Institute, ProjectDetail from forms import SelectInstituteForm, SelectOrgLevelForm def home(request): return render(request, 'index.html') class ResultsView(View): template_name = 'results.html' def get(self, request, *args, **kwargs): projects = ProjectDetail.objects.filter( record_status=2, is_rejected=False, is_deleted=False) institutes = {proj.institute for proj in projects} data = {} data['projects'] = [p.as_dict() for p in projects] data['institutes'] = [i.as_dict() for i in institutes] + if request.user.is_authenticated(): - if request.user.is_proj_leader or request.user.is_institute_admin: + if request.user.is_proj_leader or request.user.is_institute_admin: ? ++++ - data['user_institute'] = request.user.get_user_institute().as_dict() + data['user_institute'] = request.user.get_user_institute().as_dict() ? ++++ context = { "data": json.dumps(data), } return render( request, self.template_name, context=context)
0d9c151d9f61d03e57f815d99158e1b90c9dca5e
erpnext/education/doctype/instructor/instructor.py
erpnext/education/doctype/instructor/instructor.py
from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee}, 'name'): frappe.throw(_("Employee ID is linked with another instructor"))
from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee, 'name': ['!=', self.name]}, 'name'): frappe.throw(_("Employee ID is linked with another instructor"))
Exclude current record while validating duplicate employee
fix: Exclude current record while validating duplicate employee
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): - if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee}, 'name'): + if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee, 'name': ['!=', self.name]}, 'name'): frappe.throw(_("Employee ID is linked with another instructor"))
Exclude current record while validating duplicate employee
## Code Before: from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee}, 'name'): frappe.throw(_("Employee ID is linked with another instructor")) ## Instruction: Exclude current record while validating duplicate employee ## Code After: from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee, 'name': ['!=', self.name]}, 'name'): frappe.throw(_("Employee ID 
is linked with another instructor"))
from __future__ import unicode_literals import frappe from frappe import _ from frappe.model.document import Document from frappe.model.naming import set_name_by_naming_series class Instructor(Document): def autoname(self): naming_method = frappe.db.get_value("Education Settings", None, "instructor_created_by") if not naming_method: frappe.throw(_("Please setup Instructor Naming System in Education > Education Settings")) else: if naming_method == 'Naming Series': set_name_by_naming_series(self) elif naming_method == 'Employee Number': if not self.employee: frappe.throw(_("Please select Employee")) self.name = self.employee elif naming_method == 'Full Name': self.name = self.instructor_name def validate(self): self.validate_duplicate_employee() def validate_duplicate_employee(self): - if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee}, 'name'): + if self.employee and frappe.db.get_value("Instructor", {'employee': self.employee, 'name': ['!=', self.name]}, 'name'): ? +++++++++++++++++++++++++++ frappe.throw(_("Employee ID is linked with another instructor"))
81215120afffe54b17be3f38bbc2ac292452c0c4
addons/mail/models/ir_attachment.py
addons/mail/models/ir_attachment.py
from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) @api.multi def unlink(self): self.remove_as_main_attachment() super(IrAttachment, self).unlink() @api.multi def remove_as_main_attachment(self): for attachment in self: related_record = self.env[attachment.res_model].browse(attachment.res_id) if related_record and hasattr(related_record, 'message_main_attachment_id'): if related_record.message_main_attachment_id == attachment: related_record.message_main_attachment_id = False def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. """ self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self
from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. """ self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self
Revert "[FIX] mail: remove attachment as main at unlink"
Revert "[FIX] mail: remove attachment as main at unlink" This reverts commit abc45b1 Since by default the ondelete attribute of a many2one is `set null`, this was completely unnecessary to begin with. Bug caused by this commit: Unlink a record that has some attachments. The unlink first removes the record, then its related attachments. It calls remove_as_main_attachment, which reads the attachment res_model and res_id. This triggers a check that the related record can be read. However the related record has already been removed, an exception is raised. It is thus impossible to unlink a record. Closes #32563 closes odoo/odoo#32572 Signed-off-by: Raphael Collet (rco) <[email protected]>
Python
agpl-3.0
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) - @api.multi - def unlink(self): - self.remove_as_main_attachment() - super(IrAttachment, self).unlink() - - @api.multi - def remove_as_main_attachment(self): - for attachment in self: - related_record = self.env[attachment.res_model].browse(attachment.res_id) - if related_record and hasattr(related_record, 'message_main_attachment_id'): - if related_record.message_main_attachment_id == attachment: - related_record.message_main_attachment_id = False - def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. """ self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self
Revert "[FIX] mail: remove attachment as main at unlink"
## Code Before: from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) @api.multi def unlink(self): self.remove_as_main_attachment() super(IrAttachment, self).unlink() @api.multi def remove_as_main_attachment(self): for attachment in self: related_record = self.env[attachment.res_model].browse(attachment.res_id) if related_record and hasattr(related_record, 'message_main_attachment_id'): if related_record.message_main_attachment_id == attachment: related_record.message_main_attachment_id = False def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. """ self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self ## Instruction: Revert "[FIX] mail: remove attachment as main at unlink" ## Code After: from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. 
""" self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self
from odoo import api, fields, models class IrAttachment(models.Model): _inherit = 'ir.attachment' @api.multi def _post_add_create(self): """ Overrides behaviour when the attachment is created through the controller """ super(IrAttachment, self)._post_add_create() for record in self: record.register_as_main_attachment(force=False) - @api.multi - def unlink(self): - self.remove_as_main_attachment() - super(IrAttachment, self).unlink() - - @api.multi - def remove_as_main_attachment(self): - for attachment in self: - related_record = self.env[attachment.res_model].browse(attachment.res_id) - if related_record and hasattr(related_record, 'message_main_attachment_id'): - if related_record.message_main_attachment_id == attachment: - related_record.message_main_attachment_id = False - def register_as_main_attachment(self, force=True): """ Registers this attachment as the main one of the model it is attached to. """ self.ensure_one() related_record = self.env[self.res_model].browse(self.res_id) # message_main_attachment_id field can be empty, that's why we compare to False; # we are just checking that it exists on the model before writing it if related_record and hasattr(related_record, 'message_main_attachment_id'): if force or not related_record.message_main_attachment_id: related_record.message_main_attachment_id = self
04de16d7287bad5023b34efc072e104d8b35c29a
test/test.py
test/test.py
from RPi import GPIO GPIO.setmode(GPIO.BCM) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) active = sum(pin_states.values()) inactive = num_pins - active print() print("Total active: %i" % inactive) print("Total inactive: %i" % active)
from RPi import GPIO GPIO.setmode(GPIO.BCM) GPIO.setwarnings(False) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) active = [pin for pin, state in pin_states.items() if not state] inactive = [pin for pin, state in pin_states.items() if state] print() print("Total active: %s" % len(active)) print("Total inactive: %s" % len(inactive)) print() print("Active pins: %s" % str(active)) print("Inactive pins: %s" % str(inactive))
Add printing of active/inactive pins
Add printing of active/inactive pins
Python
bsd-3-clause
raspberrypilearning/dots,RPi-Distro/python-rpi-dots
from RPi import GPIO GPIO.setmode(GPIO.BCM) + GPIO.setwarnings(False) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) - active = sum(pin_states.values()) - inactive = num_pins - active + active = [pin for pin, state in pin_states.items() if not state] + inactive = [pin for pin, state in pin_states.items() if state] print() - print("Total active: %i" % inactive) + print("Total active: %s" % len(active)) - print("Total inactive: %i" % active) + print("Total inactive: %s" % len(inactive)) + print() + print("Active pins: %s" % str(active)) + print("Inactive pins: %s" % str(inactive)) +
Add printing of active/inactive pins
## Code Before: from RPi import GPIO GPIO.setmode(GPIO.BCM) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) active = sum(pin_states.values()) inactive = num_pins - active print() print("Total active: %i" % inactive) print("Total inactive: %i" % active) ## Instruction: Add printing of active/inactive pins ## Code After: from RPi import GPIO GPIO.setmode(GPIO.BCM) GPIO.setwarnings(False) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) active = [pin for pin, state in pin_states.items() if not state] inactive = [pin for pin, state in pin_states.items() if state] print() print("Total active: %s" % len(active)) print("Total inactive: %s" % len(inactive)) print() print("Active pins: %s" % str(active)) print("Inactive pins: %s" % str(inactive))
from RPi import GPIO GPIO.setmode(GPIO.BCM) + GPIO.setwarnings(False) num_pins = 28 pins = range(num_pins) for pin in pins: GPIO.setup(pin, GPIO.IN, GPIO.PUD_UP) pin_states = {pin: GPIO.input(pin) for pin in pins} print() for pin, state in pin_states.items(): print("%2d: %s" % (pin, state)) - active = sum(pin_states.values()) - inactive = num_pins - active + active = [pin for pin, state in pin_states.items() if not state] + inactive = [pin for pin, state in pin_states.items() if state] print() - print("Total active: %i" % inactive) ? ^ ^ + print("Total active: %s" % len(active)) ? ^ ^^ + + - print("Total inactive: %i" % active) ? ^ + print("Total inactive: %s" % len(inactive)) ? ^ ++++++ + + print() + print("Active pins: %s" % str(active)) + print("Inactive pins: %s" % str(inactive)) +
89d9987f742fa74fc3646ccc163610d0c9400d75
dewbrick/utils.py
dewbrick/utils.py
import tldextract import pyphen from random import choice TITLES = ('Mister', 'Little Miss') SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name
import tldextract import pyphen from random import choice TITLES = ('Mister', 'Little Miss', 'Señor', 'Queen') SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD', 'Ah-gowan-gowan-gowan') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name
Add more titles and suffixes
Add more titles and suffixes
Python
apache-2.0
ohmygourd/dewbrick,ohmygourd/dewbrick,ohmygourd/dewbrick
import tldextract import pyphen from random import choice - TITLES = ('Mister', 'Little Miss') + TITLES = ('Mister', 'Little Miss', 'Señor', 'Queen') - SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD') + SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD', + 'Ah-gowan-gowan-gowan') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name
Add more titles and suffixes
## Code Before: import tldextract import pyphen from random import choice TITLES = ('Mister', 'Little Miss') SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name ## Instruction: Add more titles and suffixes ## Code After: import tldextract import pyphen from random import choice TITLES = ('Mister', 'Little Miss', 'Señor', 'Queen') SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD', 'Ah-gowan-gowan-gowan') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name
import tldextract import pyphen from random import choice - TITLES = ('Mister', 'Little Miss') + TITLES = ('Mister', 'Little Miss', 'Señor', 'Queen') ? ++++++++++++++++++ - SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD') ? ^ + SUFFIXES = ('Destroyer of Worlds', 'the Monkey Botherer', 'PhD', ? ^ + 'Ah-gowan-gowan-gowan') def generate_name(domain): title = choice(TITLES) _parts = tldextract.extract(domain) _parts = [_parts.subdomain, _parts.domain] parts = [] for i, part in enumerate(_parts): if part and part != 'www': parts.append('{}{}'.format(part[0].upper(), part[1:])) name = '-'.join(parts) dic = pyphen.Pyphen(lang='en_US') name = '{} {}'.format(title, dic.inserted(name)) if choice((True, False)): name = '{} {}'.format(name, choice(SUFFIXES)) return name
840efdbc3771f60881e4052feaea18a9ea7d8eda
SLA_bot/gameevent.py
SLA_bot/gameevent.py
class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end)
class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end) class MultiShipEvent(GameEvent): def __init__(self, name, ships, start, end = None): super().__init__(name, start, end) self.ships = ships self.unscheduled = False for event in self.ships[1:]: if event: self.unscheduled = True def multi_dur(self, targets, tz): if self.unscheduled == False: return self.ships[0] ship_events = [] for index in targets: line = '`ship {:02d}: `{}'.format(index, self.ships[index]) ship_events.append(line) if len(ship_events) < 1: return '' header = self.duration(tz) body = '\n'.join(ship_events) return '{}\n{}'.format(header, body) def __repr__(self): return 'MultiShipEvent({}, {}, {}, {})'.format(self.name, self.ships, self.start, self.end)
Add functions to get external alerts
Add functions to get external alerts For the purpose of getting unscheduled events
Python
mit
EsqWiggles/SLA-bot,EsqWiggles/SLA-bot
class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end) + class MultiShipEvent(GameEvent): + def __init__(self, name, ships, start, end = None): + super().__init__(name, start, end) + self.ships = ships + self.unscheduled = False + for event in self.ships[1:]: + if event: + self.unscheduled = True + + def multi_dur(self, targets, tz): + if self.unscheduled == False: + return self.ships[0] + + ship_events = [] + for index in targets: + line = '`ship {:02d}: `{}'.format(index, self.ships[index]) + ship_events.append(line) + if len(ship_events) < 1: + return '' + + header = self.duration(tz) + body = '\n'.join(ship_events) + return '{}\n{}'.format(header, body) + + def __repr__(self): + return 'MultiShipEvent({}, {}, {}, {})'.format(self.name, self.ships, + self.start, self.end)
Add functions to get external alerts
## Code Before: class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end) ## Instruction: Add functions to get external alerts ## Code After: class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end) class MultiShipEvent(GameEvent): def __init__(self, name, ships, start, end = None): super().__init__(name, start, end) self.ships = ships self.unscheduled = False for event in self.ships[1:]: if event: self.unscheduled = True def multi_dur(self, targets, tz): if self.unscheduled == False: return self.ships[0] ship_events = [] for index in targets: line = '`ship {:02d}: `{}'.format(index, self.ships[index]) ship_events.append(line) if len(ship_events) < 1: return '' header = self.duration(tz) body = '\n'.join(ship_events) 
return '{}\n{}'.format(header, body) def __repr__(self): return 'MultiShipEvent({}, {}, {}, {})'.format(self.name, self.ships, self.start, self.end)
class GameEvent: def __init__(self, name, start, end = None): self.name = name self.start = start self.end = end @classmethod def from_ical(cls, component): n = component.get('summary') s = component.get('dtstart').dt e = getattr(component.get('dtend'), 'dt', None) return cls(n, s, e) def duration(self, tz): s = self.start.astimezone(tz) s_dt = s.strftime('%b %d, %H:%M') tz_str= s.strftime('%Z') try: e_time = self.end.astimezone(tz).strftime('%H:%M') except AttributeError: return '**{}** @ {} {}'.format(self.name, s_dt, tz_str) return '**{}** @ {} - {} {}'.format(self.name, s_dt, e_time, tz_str) def __repr__(self): return 'GameEvent({}, {}, {})'.format(self.name, self.start, self.end) + + class MultiShipEvent(GameEvent): + def __init__(self, name, ships, start, end = None): + super().__init__(name, start, end) + self.ships = ships + self.unscheduled = False + for event in self.ships[1:]: + if event: + self.unscheduled = True + + def multi_dur(self, targets, tz): + if self.unscheduled == False: + return self.ships[0] + + ship_events = [] + for index in targets: + line = '`ship {:02d}: `{}'.format(index, self.ships[index]) + ship_events.append(line) + if len(ship_events) < 1: + return '' + + header = self.duration(tz) + body = '\n'.join(ship_events) + return '{}\n{}'.format(header, body) + + def __repr__(self): + return 'MultiShipEvent({}, {}, {}, {})'.format(self.name, self.ships, + self.start, self.end)
ff3a7ad122af4cc1cdfa0b882b2d1d7366d640f2
tests/unit/test_secret.py
tests/unit/test_secret.py
import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg)
import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg) def test_unicode_issues(self): msg = u'Unicode string' box = libnacl.secret.SecretBox() # Encrypting a unicode string (in py2) should # probable assert, but instead it encryptes zeros, # perhaps the high bytes in UCS-16? ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg)
Add failing test for unicode string encryption
Add failing test for unicode string encryption
Python
apache-2.0
coinkite/libnacl
import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg) + def test_unicode_issues(self): + msg = u'Unicode string' + box = libnacl.secret.SecretBox() + # Encrypting a unicode string (in py2) should + # probable assert, but instead it encryptes zeros, + # perhaps the high bytes in UCS-16? + ctxt = box.encrypt(msg) + self.assertNotEqual(msg, ctxt) + + box2 = libnacl.secret.SecretBox(box.sk) + clear1 = box.decrypt(ctxt) + + self.assertEqual(msg, clear1) + clear2 = box2.decrypt(ctxt) + + self.assertEqual(clear1, clear2) + ctxt2 = box2.encrypt(msg) + clear3 = box.decrypt(ctxt2) + self.assertEqual(clear3, msg) +
Add failing test for unicode string encryption
## Code Before: import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg) ## Instruction: Add failing test for unicode string encryption ## Code After: import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg) def test_unicode_issues(self): msg = u'Unicode string' box = libnacl.secret.SecretBox() # Encrypting a unicode string (in py2) should # probable assert, but instead it encryptes zeros, # perhaps the high bytes in UCS-16? ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg)
import libnacl.secret # Import python libs import unittest class TestSecret(unittest.TestCase): ''' ''' def test_secret(self): msg = b'But then of course African swallows are not migratory.' box = libnacl.secret.SecretBox() ctxt = box.encrypt(msg) self.assertNotEqual(msg, ctxt) box2 = libnacl.secret.SecretBox(box.sk) clear1 = box.decrypt(ctxt) self.assertEqual(msg, clear1) clear2 = box2.decrypt(ctxt) self.assertEqual(clear1, clear2) ctxt2 = box2.encrypt(msg) clear3 = box.decrypt(ctxt2) self.assertEqual(clear3, msg) + def test_unicode_issues(self): + msg = u'Unicode string' + box = libnacl.secret.SecretBox() + + # Encrypting a unicode string (in py2) should + # probable assert, but instead it encryptes zeros, + # perhaps the high bytes in UCS-16? + ctxt = box.encrypt(msg) + self.assertNotEqual(msg, ctxt) + + box2 = libnacl.secret.SecretBox(box.sk) + clear1 = box.decrypt(ctxt) + + self.assertEqual(msg, clear1) + clear2 = box2.decrypt(ctxt) + + self.assertEqual(clear1, clear2) + ctxt2 = box2.encrypt(msg) + clear3 = box.decrypt(ctxt2) + self.assertEqual(clear3, msg)
b3f206d9b8cbde42ce2def6d8b9d8c1d90abfeeb
pyexperiment/utils/interactive.py
pyexperiment/utils/interactive.py
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ if not 'state' in kwargs: kwargs['state'] = state if not 'conf' in kwargs: kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact()
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ if 'state' not in kwargs: kwargs['state'] = state if 'conf' not in kwargs: kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact()
Fix style: not foo in [] => foo not in
Fix style: not foo in [] => foo not in
Python
mit
duerrp/pyexperiment,kinverarity1/pyexperiment,DeercoderResearch/pyexperiment,shaunstanislaus/pyexperiment,shaunstanislaus/pyexperiment,kinverarity1/pyexperiment,kinverarity1/pyexperiment,duerrp/pyexperiment,DeercoderResearch/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,DeercoderResearch/pyexperiment,DeercoderResearch/pyexperiment,kinverarity1/pyexperiment,shaunstanislaus/pyexperiment
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ - if not 'state' in kwargs: + if 'state' not in kwargs: kwargs['state'] = state - if not 'conf' in kwargs: + if 'conf' not in kwargs: kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact()
Fix style: not foo in [] => foo not in
## Code Before: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ if not 'state' in kwargs: kwargs['state'] = state if not 'conf' in kwargs: kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact() ## Instruction: Fix style: not foo in [] => foo not in ## Code After: from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ if 'state' not in kwargs: kwargs['state'] = state if 'conf' not in kwargs: kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact()
from __future__ import print_function from __future__ import unicode_literals from __future__ import division from __future__ import absolute_import from pyexperiment import state from pyexperiment import conf def embed_interactive(**kwargs): """Embed an interactive terminal into a running python process """ - if not 'state' in kwargs: ? ---- + if 'state' not in kwargs: ? ++++ kwargs['state'] = state - if not 'conf' in kwargs: ? ---- + if 'conf' not in kwargs: ? ++++ kwargs['conf'] = conf try: import IPython ipython_config = IPython.Config() ipython_config.TerminalInteractiveShell.confirm_exit = False if IPython.__version__ == '1.2.1': IPython.embed(config=ipython_config, banner1='', user_ns=kwargs) else: IPython.embed(config=ipython_config, banner1='', local_ns=kwargs) except ImportError: import readline # pylint: disable=unused-variable import code code.InteractiveConsole(kwargs).interact()
7e71e21734abb2b12e309ea37910c90f7b837651
go/base/tests/test_decorators.py
go/base/tests/test_decorators.py
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
Move Django-specific pieces into the django_imports block.
Move Django-specific pieces into the django_imports block.
Python
bsd-3-clause
praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse + class CatchableDummyError(Exception): + """Error that will be caught by DummyView.post.""" - class CatchableDummyError(Exception): + class UncatchableDummyError(Exception): - """Error that will be caught by DummyView.post.""" + """Error that will not be caught by DummyView.post.""" - - class UncatchableDummyError(Exception): - """Error that will not be caught by DummyView.post.""" - - - class DummyView(object): + class DummyView(object): - @render_exception(CatchableDummyError, 400, "Meep.") + @render_exception(CatchableDummyError, 400, "Meep.") - def post(self, request, err=None): + def post(self, request, err=None): - if err is None: + if err is None: - return "Success" + return "Success" - raise err + raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
Move Django-specific pieces into the django_imports block.
## Code Before: """Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400) ## Instruction: Move Django-specific pieces into the django_imports block. 
## Code After: """Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse class CatchableDummyError(Exception): """Error that will be caught by DummyView.post.""" class UncatchableDummyError(Exception): """Error that will not be caught by DummyView.post.""" class DummyView(object): @render_exception(CatchableDummyError, 400, "Meep.") def post(self, request, err=None): if err is None: return "Success" raise err class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
"""Test for go.base.decorators.""" from go.vumitools.tests.helpers import djangotest_imports with djangotest_imports(globals()): from go.base.tests.helpers import GoDjangoTestCase from go.base.decorators import render_exception from django.template.response import TemplateResponse + class CatchableDummyError(Exception): + """Error that will be caught by DummyView.post.""" - class CatchableDummyError(Exception): ? ^ + class UncatchableDummyError(Exception): ? ++++ ^^^ - """Error that will be caught by DummyView.post.""" + """Error that will not be caught by DummyView.post.""" ? ++++ ++++ - - class UncatchableDummyError(Exception): - """Error that will not be caught by DummyView.post.""" - - - class DummyView(object): + class DummyView(object): ? ++++ - @render_exception(CatchableDummyError, 400, "Meep.") + @render_exception(CatchableDummyError, 400, "Meep.") ? ++++ - def post(self, request, err=None): + def post(self, request, err=None): ? ++++ - if err is None: + if err is None: ? ++++ - return "Success" + return "Success" ? ++++ - raise err + raise err ? ++++ class TestRenderException(GoDjangoTestCase): def test_no_exception(self): d = DummyView() self.assertEqual(d.post("request"), "Success") def test_expected_exception(self): d = DummyView() self.assertRaises( UncatchableDummyError, d.post, "request", UncatchableDummyError()) def test_other_exception(self): d = DummyView() response = d.post("request", CatchableDummyError("foo")) self.assertTrue(isinstance(response, TemplateResponse)) self.assertEqual(response.template_name, 'error.html') self.assertEqual(response.status_code, 400)
c97339e3a121c48ec3eed38e1bf901e2bf1d323c
src/proposals/resources.py
src/proposals/resources.py
from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', 'name', 'email', ] export_order = fields
from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', 'language', 'name', 'email', ] export_order = fields
Add language field to proposal export
Add language field to proposal export
Python
mit
pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016
from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', - 'name', 'email', + 'language', 'name', 'email', ] export_order = fields
Add language field to proposal export
## Code Before: from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', 'name', 'email', ] export_order = fields ## Instruction: Add language field to proposal export ## Code After: from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', 'language', 'name', 'email', ] export_order = fields
from import_export import fields, resources from .models import TalkProposal class TalkProposalResource(resources.ModelResource): name = fields.Field(attribute='submitter__speaker_name') email = fields.Field(attribute='submitter__email') class Meta: model = TalkProposal fields = [ 'id', 'title', 'category', 'python_level', 'duration', - 'name', 'email', + 'language', 'name', 'email', ? ++++++++++++ ] export_order = fields
cd4c268b0752f85f8dadac03e28f152767ce9f54
tinycontent/templatetags/tinycontent_tags.py
tinycontent/templatetags/tinycontent_tags.py
from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist def get_content_name(self, context): if self.content_name[0] != '"' and self.content_name[0] != "'": try: return context[self.content_name] except KeyError: raise TinyContent.DoesNotExist if self.content_name[0] == '"' and self.content_name[-1] == '"': return self.content_name[1:-1] if self.content_name[0] == "'" and self.content_name[-1] == "'": return self.content_name[1:-1] raise TemplateSyntaxError("Unclosed argument to tinycontent.") def render(self, context): try: name = self.get_content_name(context) obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") content_name = args[1] nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return ''
from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist def render(self, context): try: name = self.content_name.resolve(context) obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") content_name = parser.compile_filter(args[1]) nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return ''
Use parser.compile_filter instead of my half-baked attempt
Use parser.compile_filter instead of my half-baked attempt
Python
bsd-3-clause
dominicrodger/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,dominicrodger/django-tinycontent
from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist - def get_content_name(self, context): - if self.content_name[0] != '"' and self.content_name[0] != "'": - try: - return context[self.content_name] - except KeyError: - raise TinyContent.DoesNotExist - - if self.content_name[0] == '"' and self.content_name[-1] == '"': - return self.content_name[1:-1] - - if self.content_name[0] == "'" and self.content_name[-1] == "'": - return self.content_name[1:-1] - - raise TemplateSyntaxError("Unclosed argument to tinycontent.") - def render(self, context): try: - name = self.get_content_name(context) + name = self.content_name.resolve(context) obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") - content_name = args[1] + content_name = parser.compile_filter(args[1]) nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return ''
Use parser.compile_filter instead of my half-baked attempt
## Code Before: from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist def get_content_name(self, context): if self.content_name[0] != '"' and self.content_name[0] != "'": try: return context[self.content_name] except KeyError: raise TinyContent.DoesNotExist if self.content_name[0] == '"' and self.content_name[-1] == '"': return self.content_name[1:-1] if self.content_name[0] == "'" and self.content_name[-1] == "'": return self.content_name[1:-1] raise TemplateSyntaxError("Unclosed argument to tinycontent.") def render(self, context): try: name = self.get_content_name(context) obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") content_name = args[1] nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return '' ## Instruction: Use parser.compile_filter instead of my half-baked attempt ## Code After: from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist def render(self, context): try: name = self.content_name.resolve(context) obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return 
self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") content_name = parser.compile_filter(args[1]) nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return ''
from django import template from django.template.base import TemplateSyntaxError from tinycontent.models import TinyContent register = template.Library() class TinyContentNode(template.Node): def __init__(self, content_name, nodelist): self.content_name = content_name self.nodelist = nodelist - def get_content_name(self, context): - if self.content_name[0] != '"' and self.content_name[0] != "'": - try: - return context[self.content_name] - except KeyError: - raise TinyContent.DoesNotExist - - if self.content_name[0] == '"' and self.content_name[-1] == '"': - return self.content_name[1:-1] - - if self.content_name[0] == "'" and self.content_name[-1] == "'": - return self.content_name[1:-1] - - raise TemplateSyntaxError("Unclosed argument to tinycontent.") - def render(self, context): try: - name = self.get_content_name(context) ? ---- + name = self.content_name.resolve(context) ? ++++++++ obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return self.nodelist.render(context) @register.tag def tinycontent(parser, token): args = token.split_contents() if len(args) != 2: raise TemplateSyntaxError("'tinycontent' tag takes exactly one" " argument.") - content_name = args[1] + content_name = parser.compile_filter(args[1]) nodelist = parser.parse(('endtinycontent',)) parser.delete_first_token() return TinyContentNode(content_name, nodelist) @register.simple_tag def tinycontent_simple(name): try: obj = TinyContent.objects.get(name=name) return obj.content except TinyContent.DoesNotExist: return ''
ce59932d485440c592abbacc16c1fc32a7cde6e2
jktest/testcase.py
jktest/testcase.py
import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): pass def test1( self ): resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file )
import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() # Print test header for nicer output formatting print( '\n**********************************************' ) print( 'BEGIN TEST OF: ' + str( self.file ) ) for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): print( '\nEND TEST OF ' + str( self.file ) ) def test_result( self ): resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file )
Add prints for output formatting
Add prints for output formatting
Python
bsd-3-clause
agacek/jkindRegression,pr-martin/jkindRegression
import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() + # Print test header for nicer output formatting + print( '\n**********************************************' ) + print( 'BEGIN TEST OF: ' + str( self.file ) ) + for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): - pass + print( '\nEND TEST OF ' + str( self.file ) ) + - def test1( self ): + def test_result( self ): resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file )
Add prints for output formatting
## Code Before: import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): pass def test1( self ): resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file ) ## Instruction: Add prints for output formatting ## Code After: import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() # Print test header for nicer output formatting print( '\n**********************************************' ) print( 'BEGIN TEST OF: ' + str( self.file ) ) for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): print( '\nEND TEST OF ' + str( self.file ) ) def test_result( self ): resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( 
jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file )
import unittest from jktest.config import TestConfig from jktest.jkind import JKind from jktest.results import ResultList class TestCase( unittest.TestCase ): def assertTrue( self, expr, msg = None ): super( TestCase, self ).assertTrue( expr, msg ) class JKTestCase( unittest.TestCase ): # class JKTestCase( TestCase ): def __init__( self, methodName = 'runTest' ): unittest.TestCase.__init__( self, methodName = methodName ) def setUp( self ): self.results = ResultList() self.file = TestConfig().popFile() + # Print test header for nicer output formatting + print( '\n**********************************************' ) + print( 'BEGIN TEST OF: ' + str( self.file ) ) + for arg in TestConfig().nextArg(): self.results.append( JKind( self.file, arg ).run() ) def tearDown( self ): - pass + print( '\nEND TEST OF ' + str( self.file ) ) + - def test1( self ): ? ^ + def test_result( self ): ? ^^^^^^^ resultsList = self.results.copy() controlList = resultsList.pop() for each in resultsList: ok = ( controlList == each ) if( ok == False ): for jkr in controlList: for line in ( jkr.failures() ): print( line ) self.assertTrue( ok, 'Test File: ' + self.file )
c74fc42de3f052ac83342ed33afb2865080c8d67
threema/gateway/__init__.py
threema/gateway/__init__.py
import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa # noinspection PyUnresolvedReferences from . import bin, simple, e2e, key, util # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), _gateway.__all__, # noqa exception.__all__, # noqa ('bin', 'simple', 'e2e', 'key', 'util') ))
import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), ('bin', 'simple', 'e2e', 'key', 'util'), _gateway.__all__, # noqa exception.__all__, # noqa ))
Remove unneeded imports leading to failures
Remove unneeded imports leading to failures
Python
mit
threema-ch/threema-msgapi-sdk-python,lgrahl/threema-msgapi-sdk-python
import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa - # noinspection PyUnresolvedReferences - from . import bin, simple, e2e, key, util # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), + ('bin', 'simple', 'e2e', 'key', 'util'), _gateway.__all__, # noqa exception.__all__, # noqa - ('bin', 'simple', 'e2e', 'key', 'util') ))
Remove unneeded imports leading to failures
## Code Before: import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa # noinspection PyUnresolvedReferences from . import bin, simple, e2e, key, util # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), _gateway.__all__, # noqa exception.__all__, # noqa ('bin', 'simple', 'e2e', 'key', 'util') )) ## Instruction: Remove unneeded imports leading to failures ## Code After: import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), ('bin', 'simple', 'e2e', 'key', 'util'), _gateway.__all__, # noqa exception.__all__, # noqa ))
import itertools # noinspection PyUnresolvedReferences from ._gateway import * # noqa # noinspection PyUnresolvedReferences from .exception import * # noqa - # noinspection PyUnresolvedReferences - from . import bin, simple, e2e, key, util # noqa __author__ = 'Lennart Grahl <[email protected]>' __status__ = 'Production' __version__ = '3.0.0' feature_level = 3 __all__ = tuple(itertools.chain( ('feature_level',), + ('bin', 'simple', 'e2e', 'key', 'util'), _gateway.__all__, # noqa exception.__all__, # noqa - ('bin', 'simple', 'e2e', 'key', 'util') ))
e62469c3572cf9bfa02cd153becc1b36ecf8b3df
run-hooks.py
run-hooks.py
from eve import Eve def piterpy(endpoint, response): for document in response['_items']: document['PITERPY'] = 'IS SO COOL!' app = Eve() app.on_fetched_resource += piterpy if __name__ == '__main__': app.run()
from eve import Eve def codemotion(endpoint, response): for document in response['_items']: document['CODEMOTION'] = 'IS SO FREAKING COOL!' app = Eve() app.on_fetched_resource += codemotion if __name__ == '__main__': app.run()
Prepare for Codemotion Rome 2017 demo
Prepare for Codemotion Rome 2017 demo
Python
bsd-3-clause
nicolaiarocci/eve-demo
from eve import Eve - def piterpy(endpoint, response): + def codemotion(endpoint, response): for document in response['_items']: - document['PITERPY'] = 'IS SO COOL!' + document['CODEMOTION'] = 'IS SO FREAKING COOL!' app = Eve() - app.on_fetched_resource += piterpy + app.on_fetched_resource += codemotion if __name__ == '__main__': app.run()
Prepare for Codemotion Rome 2017 demo
## Code Before: from eve import Eve def piterpy(endpoint, response): for document in response['_items']: document['PITERPY'] = 'IS SO COOL!' app = Eve() app.on_fetched_resource += piterpy if __name__ == '__main__': app.run() ## Instruction: Prepare for Codemotion Rome 2017 demo ## Code After: from eve import Eve def codemotion(endpoint, response): for document in response['_items']: document['CODEMOTION'] = 'IS SO FREAKING COOL!' app = Eve() app.on_fetched_resource += codemotion if __name__ == '__main__': app.run()
from eve import Eve - def piterpy(endpoint, response): ? ^ ^^^^^ + def codemotion(endpoint, response): ? ^^^^^^^ ^^ for document in response['_items']: - document['PITERPY'] = 'IS SO COOL!' ? ^ ^^^^^ + document['CODEMOTION'] = 'IS SO FREAKING COOL!' ? ^^^^^^^ ^^ +++++++++ app = Eve() - app.on_fetched_resource += piterpy ? ^ ^^^^^ + app.on_fetched_resource += codemotion ? ^^^^^^^ ^^ if __name__ == '__main__': app.run()
924766a6b56aba3a462600a70e5f4b7b322c677e
test/test_utils.py
test/test_utils.py
from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon')
from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True def test_dict_items_become_dotdicts_when_using_dict_access(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd['highway'], DotDict) is True def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon')
Add extra DotDict subscriptability test
Add extra DotDict subscriptability test
Python
mit
thiderman/piper
from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True + def test_dict_items_become_dotdicts_when_using_dict_access(self): + dd = DotDict({'highway': {'danger': 'zone'}}) + assert isinstance(dd['highway'], DotDict) is True + def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon')
Add extra DotDict subscriptability test
## Code Before: from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon') ## Instruction: Add extra DotDict subscriptability test ## Code After: from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True def test_dict_items_become_dotdicts_when_using_dict_access(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd['highway'], DotDict) is True def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with 
pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon')
from piper.utils import DotDict from piper.utils import dynamic_load import pytest class TestDotDict(object): def test_get_nonexistant_raises_keyerror(self): with pytest.raises(KeyError): dd = DotDict({}) dd.does_not_exist def test_get_item(self): dd = DotDict({'danger': 'zone'}) assert dd.danger == 'zone' def test_get_item_dict_access(self): dd = DotDict({'danger': 'zone'}) assert dd['danger'] == 'zone' def test_dict_items_become_dotdicts(self): dd = DotDict({'highway': {'danger': 'zone'}}) assert isinstance(dd.highway, DotDict) is True + def test_dict_items_become_dotdicts_when_using_dict_access(self): + dd = DotDict({'highway': {'danger': 'zone'}}) + assert isinstance(dd['highway'], DotDict) is True + def test_nested_access(self): dd = DotDict({'highway': {'danger': {'zone': True}}}) assert dd.highway.danger.zone is True class TestDynamicLoad(object): def test_proper_load(self): cls = dynamic_load('piper.utils.DotDict') assert cls is DotDict def test_nonexistant_target(self): with pytest.raises(ImportError): dynamic_load('gammaray.empire.Avalon')
484233e1c3140e7cca9cd1874c1cf984280e2c92
zeus/tasks/send_build_notifications.py
zeus/tasks/send_build_notifications.py
from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): build = Build.query.get(build_id) if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build)
from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): build = Build.query.unrestricted_unsafe().get(build_id) if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build)
Remove tenant req from task query
Remove tenant req from task query
Python
apache-2.0
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): - build = Build.query.get(build_id) + build = Build.query.unrestricted_unsafe().get(build_id) if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build)
Remove tenant req from task query
## Code Before: from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): build = Build.query.get(build_id) if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build) ## Instruction: Remove tenant req from task query ## Code After: from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): build = Build.query.unrestricted_unsafe().get(build_id) if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build)
from uuid import UUID from zeus import auth from zeus.config import celery from zeus.constants import Result, Status from zeus.models import Build from zeus.notifications import email @celery.task(name='zeus.tasks.send_build_notifications', max_retries=None) def send_build_notifications(build_id: UUID): - build = Build.query.get(build_id) + build = Build.query.unrestricted_unsafe().get(build_id) ? ++++++++++++++++++++++ if not build: raise ValueError('Unable to find build with id = {}'.format(build_id)) auth.set_current_tenant(auth.Tenant( repository_ids=[build.repository_id])) # double check that the build is still finished and only send when # its failing if build.result != Result.failed or build.status != Status.finished: return email.send_email_notification(build=build)
a426a460555e17d6969444f6dea2ef4e131d6eaf
iatidataquality/registry.py
iatidataquality/registry.py
from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading"
from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading" @app.route("/registry/deleted/") @usermanagement.perms_required() def registry_deleted(): num_deleted = dqregistry.check_deleted_packages() if num_deleted >0: msg = '%s packages were set to deleted' % num_deleted else: msg = "No packages were set to deleted" flash(msg, '') return redirect(url_for('packages_manage'))
Add URL for checking for deleted packages
Add URL for checking for deleted packages
Python
agpl-3.0
pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality,pwyf/IATI-Data-Quality
from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading" + @app.route("/registry/deleted/") + @usermanagement.perms_required() + def registry_deleted(): + num_deleted = dqregistry.check_deleted_packages() + if num_deleted >0: + msg = '%s packages were set to deleted' % num_deleted + else: + msg = "No packages were set to deleted" + + flash(msg, '') + return redirect(url_for('packages_manage')) +
Add URL for checking for deleted packages
## Code Before: from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading" ## Instruction: Add URL for checking for deleted packages ## Code After: from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading" @app.route("/registry/deleted/") @usermanagement.perms_required() def registry_deleted(): num_deleted = dqregistry.check_deleted_packages() if num_deleted >0: msg = '%s packages were set to deleted' % num_deleted else: msg = "No packages were set to deleted" flash(msg, '') return redirect(url_for('packages_manage'))
from flask import Flask, render_template, flash, request, Markup, \ session, redirect, url_for, escape, Response, abort, send_file from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import func from datetime import datetime from iatidataquality import app from iatidataquality import db from iatidq import dqdownload, dqregistry, dqindicators, dqorganisations, dqpackages import usermanagement @app.route("/registry/refresh/") @usermanagement.perms_required() def registry_refresh(): dqregistry.refresh_packages() return "Refreshed" @app.route("/registry/download/") @usermanagement.perms_required() def registry_download(): dqdownload.run() return "Downloading" + + @app.route("/registry/deleted/") + @usermanagement.perms_required() + def registry_deleted(): + num_deleted = dqregistry.check_deleted_packages() + if num_deleted >0: + msg = '%s packages were set to deleted' % num_deleted + else: + msg = "No packages were set to deleted" + + flash(msg, '') + return redirect(url_for('packages_manage'))
984c395e3f43764a4d8125aea7556179bb4766dd
test/_mysqldb_test.py
test/_mysqldb_test.py
''' $ mysql Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 211 Server version: 5.6.15 Homebrew Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. mysql> create database luigi; Query OK, 1 row affected (0.00 sec) ''' import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
Remove the doc that describes the setup. Setup is automated now
Remove the doc that describes the setup. Setup is automated now
Python
apache-2.0
moritzschaefer/luigi,kalaidin/luigi,riga/luigi,foursquare/luigi,dylanjbarth/luigi,Dawny33/luigi,harveyxia/luigi,graingert/luigi,slvnperron/luigi,harveyxia/luigi,sahitya-pavurala/luigi,hadesbox/luigi,rayrrr/luigi,humanlongevity/luigi,Tarrasch/luigi,Magnetic/luigi,percyfal/luigi,torypages/luigi,stroykova/luigi,theoryno3/luigi,leafjungle/luigi,slvnperron/luigi,vine/luigi,Houzz/luigi,SeedScientific/luigi,stephenpascoe/luigi,fw1121/luigi,LamCiuLoeng/luigi,ivannotes/luigi,kalaidin/luigi,dhruvg/luigi,meyerson/luigi,ViaSat/luigi,jw0201/luigi,anyman/luigi,bowlofstew/luigi,kalaidin/luigi,Yoone/luigi,stephenpascoe/luigi,SkyTruth/luigi,dlstadther/luigi,dstandish/luigi,ZhenxingWu/luigi,theoryno3/luigi,Dawny33/luigi,PeteW/luigi,ZhenxingWu/luigi,mbruggmann/luigi,aeron15/luigi,foursquare/luigi,penelopy/luigi,laserson/luigi,h3biomed/luigi,wakamori/luigi,adaitche/luigi,kevhill/luigi,ContextLogic/luigi,slvnperron/luigi,dstandish/luigi,samepage-labs/luigi,huiyi1990/luigi,rayrrr/luigi,meyerson/luigi,ContextLogic/luigi,bmaggard/luigi,penelopy/luigi,torypages/luigi,slvnperron/luigi,hellais/luigi,linearregression/luigi,ehdr/luigi,springcoil/luigi,neilisaac/luigi,lungetech/luigi,spotify/luigi,Tarrasch/luigi,PeteW/luigi,dstandish/luigi,javrasya/luigi,dhruvg/luigi,torypages/luigi,wakamori/luigi,jw0201/luigi,rayrrr/luigi,springcoil/luigi,lungetech/luigi,laserson/luigi,h3biomed/luigi,realgo/luigi,mfcabrera/luigi,jw0201/luigi,joeshaw/luigi,oldpa/luigi,soxofaan/luigi,republic-analytics/luigi,ChrisBeaumont/luigi,rizzatti/luigi,graingert/luigi,linsomniac/luigi,penelopy/luigi,casey-green/luigi,DomainGroupOSS/luigi,neilisaac/luigi,altaf-ali/luigi,belevtsoff/luigi,gpoulin/luigi,edx/luigi,laserson/luigi,anyman/luigi,altaf-ali/luigi,DomainGroupOSS/luigi,dhruvg/luigi,adaitche/luigi,PeteW/luigi,huiyi1990/luigi,walkers-mv/luigi,dkroy/luigi,alkemics/luigi,JackDanger/luigi,moandcompany/luigi,altaf-ali/luigi,dlstadther/luigi,jamesmcm/luigi,tuulos/luigi,edx/luigi,vine/luigi,walkers-mv/luigi,anyman/luigi,linear
regression/luigi,jamesmcm/luigi,glenndmello/luigi,JackDanger/luigi,moritzschaefer/luigi,thejens/luigi,soxofaan/luigi,kalaidin/luigi,stroykova/luigi,neilisaac/luigi,dkroy/luigi,ViaSat/luigi,17zuoye/luigi,anyman/luigi,ehdr/luigi,samuell/luigi,samuell/luigi,graingert/luigi,vine/luigi,belevtsoff/luigi,SkyTruth/luigi,upworthy/luigi,upworthy/luigi,lichia/luigi,joeshaw/luigi,fabriziodemaria/luigi,neilisaac/luigi,humanlongevity/luigi,wakamori/luigi,percyfal/luigi,Dawny33/luigi,lichia/luigi,ViaSat/luigi,hadesbox/luigi,dylanjbarth/luigi,17zuoye/luigi,Houzz/luigi,PeteW/luigi,joeshaw/luigi,samepage-labs/luigi,meyerson/luigi,ChrisBeaumont/luigi,ehdr/luigi,LamCiuLoeng/luigi,ZhenxingWu/luigi,dylanjbarth/luigi,Magnetic/luigi,theoryno3/luigi,samepage-labs/luigi,dhruvg/luigi,theoryno3/luigi,mfcabrera/luigi,kevhill/luigi,h3biomed/luigi,DomainGroupOSS/luigi,17zuoye/luigi,fabriziodemaria/luigi,leafjungle/luigi,drincruz/luigi,javrasya/luigi,LamCiuLoeng/luigi,mbruggmann/luigi,cpcloud/luigi,dstandish/luigi,lichia/luigi,ViaSat/luigi,fw1121/luigi,dylanjbarth/luigi,hadesbox/luigi,bmaggard/luigi,ivannotes/luigi,oldpa/luigi,Tarrasch/luigi,lichia/luigi,soxofaan/luigi,tuulos/luigi,linsomniac/luigi,realgo/luigi,qpxu007/luigi,qpxu007/luigi,bmaggard/luigi,moritzschaefer/luigi,graingert/luigi,springcoil/luigi,foursquare/luigi,soxofaan/luigi,riga/luigi,springcoil/luigi,spotify/luigi,realgo/luigi,upworthy/luigi,ivannotes/luigi,SeedScientific/luigi,percyfal/luigi,republic-analytics/luigi,pkexcellent/luigi,ehdr/luigi,walkers-mv/luigi,thejens/luigi,humanlongevity/luigi,rizzatti/luigi,ChrisBeaumont/luigi,ThQ/luigi,drincruz/luigi,fabriziodemaria/luigi,samepage-labs/luigi,glenndmello/luigi,vine/luigi,republic-analytics/luigi,ContextLogic/luigi,mfcabrera/luigi,republic-analytics/luigi,jamesmcm/luigi,Wattpad/luigi,mbruggmann/luigi,ThQ/luigi,rizzatti/luigi,bowlofstew/luigi,hadesbox/luigi,linsomniac/luigi,stephenpascoe/luigi,dlstadther/luigi,belevtsoff/luigi,stroykova/luigi,upworthy/luigi,stroykova/luigi,moritzs
chaefer/luigi,casey-green/luigi,leafjungle/luigi,SkyTruth/luigi,casey-green/luigi,dlstadther/luigi,JackDanger/luigi,moandcompany/luigi,moandcompany/luigi,Houzz/luigi,ivannotes/luigi,lungetech/luigi,javrasya/luigi,Tarrasch/luigi,mfcabrera/luigi,DomainGroupOSS/luigi,spotify/luigi,humanlongevity/luigi,wakamori/luigi,linearregression/luigi,qpxu007/luigi,moandcompany/luigi,hellais/luigi,17zuoye/luigi,bowlofstew/luigi,riga/luigi,pkexcellent/luigi,altaf-ali/luigi,harveyxia/luigi,riga/luigi,h3biomed/luigi,samuell/luigi,ChrisBeaumont/luigi,kevhill/luigi,rayrrr/luigi,LamCiuLoeng/luigi,penelopy/luigi,sahitya-pavurala/luigi,aeron15/luigi,samuell/luigi,rizzatti/luigi,jw0201/luigi,SkyTruth/luigi,alkemics/luigi,Dawny33/luigi,bmaggard/luigi,linearregression/luigi,alkemics/luigi,SeedScientific/luigi,ThQ/luigi,sahitya-pavurala/luigi,jamesmcm/luigi,dkroy/luigi,oldpa/luigi,lungetech/luigi,Wattpad/luigi,dkroy/luigi,aeron15/luigi,gpoulin/luigi,thejens/luigi,Magnetic/luigi,JackDanger/luigi,belevtsoff/luigi,sahitya-pavurala/luigi,pkexcellent/luigi,casey-green/luigi,walkers-mv/luigi,Yoone/luigi,glenndmello/luigi,Magnetic/luigi,qpxu007/luigi,drincruz/luigi,Wattpad/luigi,hellais/luigi,ZhenxingWu/luigi,glenndmello/luigi,huiyi1990/luigi,linsomniac/luigi,edx/luigi,aeron15/luigi,adaitche/luigi,fw1121/luigi,thejens/luigi,meyerson/luigi,fw1121/luigi,SeedScientific/luigi,kevhill/luigi,fabriziodemaria/luigi,harveyxia/luigi,foursquare/luigi,percyfal/luigi,laserson/luigi,leafjungle/luigi,tuulos/luigi,drincruz/luigi,bowlofstew/luigi,oldpa/luigi,spotify/luigi,tuulos/luigi,joeshaw/luigi,gpoulin/luigi,pkexcellent/luigi,gpoulin/luigi,Yoone/luigi,adaitche/luigi,huiyi1990/luigi,ThQ/luigi,torypages/luigi,hellais/luigi,stephenpascoe/luigi,edx/luigi,cpcloud/luigi,ContextLogic/luigi,javrasya/luigi,realgo/luigi,mbruggmann/luigi,Houzz/luigi,alkemics/luigi,Yoone/luigi
- ''' - $ mysql - Welcome to the MySQL monitor. Commands end with ; or \g. - Your MySQL connection id is 211 - Server version: 5.6.15 Homebrew - - Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. - - Oracle is a registered trademark of Oracle Corporation and/or its - affiliates. Other names may be trademarks of their respective - owners. - - Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. - - mysql> create database luigi; - Query OK, 1 row affected (0.00 sec) - ''' - import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
Remove the doc that describes the setup. Setup is automated now
## Code Before: ''' $ mysql Welcome to the MySQL monitor. Commands end with ; or \g. Your MySQL connection id is 211 Server version: 5.6.15 Homebrew Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. Oracle is a registered trademark of Oracle Corporation and/or its affiliates. Other names may be trademarks of their respective owners. Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. mysql> create database luigi; Query OK, 1 row affected (0.00 sec) ''' import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates) ## Instruction: Remove the doc that describes the setup. 
Setup is automated now ## Code After: import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
- ''' - $ mysql - Welcome to the MySQL monitor. Commands end with ; or \g. - Your MySQL connection id is 211 - Server version: 5.6.15 Homebrew - - Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. - - Oracle is a registered trademark of Oracle Corporation and/or its - affiliates. Other names may be trademarks of their respective - owners. - - Type 'help;' or '\h' for help. Type '\c' to clear the current input statement. - - mysql> create database luigi; - Query OK, 1 row affected (0.00 sec) - ''' - import mysql.connector from luigi.contrib.mysqldb import MySqlTarget import unittest host = 'localhost' port = 3306 database = 'luigi_test' username = None password = None table_updates = 'table_updates' def _create_test_database(): con = mysql.connector.connect(user=username, password=password, host=host, port=port, autocommit=True) con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database) _create_test_database() target = MySqlTarget(host, database, username, password, '', 'update_id') class MySqlTargetTest(unittest.TestCase): def test_touch_and_exists(self): drop() self.assertFalse(target.exists(), 'Target should not exist before touching it') target.touch() self.assertTrue(target.exists(), 'Target should exist after touching it') def drop(): con = target.connect(autocommit=True) con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
17bd35d7a2b442faebdb39aad07294612d8e7038
nflh/games.py
nflh/games.py
from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_)
from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" self.videos = {} def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_)
Add videos dict to Games.
Add videos dict to Games.
Python
apache-2.0
twbarber/nfl-highlight-bot
from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" + self.videos = {} def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_)
Add videos dict to Games.
## Code Before: from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_) ## Instruction: Add videos dict to Games. ## Code After: from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" self.videos = {} def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_)
from datetime import datetime GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json" LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json" class Game(object): def __init__(self, id_, h, v): self.id_ = id_ self.date = self.id_[:-2] self.home = h self.vis = v self.latest_play_id = "" self.latest_clip_id = "" + self.videos = {} def is_today(self): return self.date == str((datetime.today()).strftime('%Y%m%d')) def video_url(self): return GAME_VIDEO_BASE_URL.format(self.id_) def live_update_url(self): return LIVE_UPDATE_BASE_URL.format(self.id_)
6cac0b8531297dab6bdaff2959646d5a8a90dd01
parse_vcfFile.py
parse_vcfFile.py
import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file :return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) return snps
import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file :return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) snps.index = snps.ID snps = snps.iloc[:,9:].T return snps
Update VCF parsing to output SNP-column sample-row DataFrame
Update VCF parsing to output SNP-column sample-row DataFrame
Python
mit
NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop,NCBI-Hackathons/Network_Stats_Acc_Interop
import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file - :return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples + :return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) + snps.index = snps.ID + snps = snps.iloc[:,9:].T return snps
Update VCF parsing to output SNP-column sample-row DataFrame
## Code Before: import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file :return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) return snps ## Instruction: Update VCF parsing to output SNP-column sample-row DataFrame ## Code After: import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file :return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) snps.index = snps.ID snps = snps.iloc[:,9:].T return snps
import pandas def read_vcf(filename): """ Reads an input VCF file containing lines for each SNP and columns with genotype info for each sample. :param filename: Path to VCF file - :return: Pandas DataFrame representing VCF file with rows as SNPs and columns with info and samples ? ^ ^ ^ ^^^^ --------- + :return: Pandas DataFrame representing VCF file with columns as SNPs and rows with samples ? ^ ^^^^ ^ ^ """ vcf = open(filename) for l in vcf: if not l.startswith('##'): header = l.strip().split('\t') break snps = pandas.read_table(vcf, names=header) + snps.index = snps.ID + snps = snps.iloc[:,9:].T return snps
20079bf375149bb0e8646a2d81dd800028f49faa
captura/views.py
captura/views.py
from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required #@user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review and the action buttons to add and edit each socio-economic study. """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios})
from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required @user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review and the button to add a new socio-economic study. Also shows the edit and see feedback buttons to each socio-economic study shown in the list if this exist for the current user (capturist). """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios})
Add more comments in capturist dashboard view
Add more comments in capturist dashboard view
Python
mit
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required - #@user_passes_test(is_capturista) + @user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review - and the action buttons to add and edit each socio-economic study. + and the button to add a new socio-economic study. + Also shows the edit and see feedback buttons to each socio-economic study + shown in the list if this exist for the current user (capturist). """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios})
Add more comments in capturist dashboard view
## Code Before: from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required #@user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review and the action buttons to add and edit each socio-economic study. """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios}) ## Instruction: Add more comments in capturist dashboard view ## Code After: from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required @user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review and the button to add a new socio-economic study. Also shows the edit and see feedback buttons to each socio-economic study shown in the list if this exist for the current user (capturist). """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios})
from django.contrib.auth.decorators import user_passes_test, login_required from django.shortcuts import render from perfiles_usuario.utils import is_capturista from estudios_socioeconomicos.models import Estudio @login_required - #@user_passes_test(is_capturista) ? - + @user_passes_test(is_capturista) def capturista_dashboard(request): """View to render the capturista control dashboard. This view shows the list of socio-economic studies that are under review - and the action buttons to add and edit each socio-economic study. ? ------- - -- ^^^^^^^^ + and the button to add a new socio-economic study. ? + ^ + Also shows the edit and see feedback buttons to each socio-economic study + shown in the list if this exist for the current user (capturist). """ estudios = [] iduser = request.user.id rechazados = Estudio.objects.filter(status='rechazado') for estudio in rechazados: if estudio.capturista_id == iduser: estudios.append(estudio) return render(request, 'captura/dashboard_capturista.html', {'estudios': estudios})
685ae9d284a9df71563c05773e4110e5ddc16b38
backend/breach/forms.py
backend/breach/forms.py
from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( 'id', )
from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', 'target' )
Add sourceip and target parameters to AttackForm
Add sourceip and target parameters to AttackForm
Python
mit
dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,esarafianou/rupture
from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( - 'id', + 'sourceip', + 'target' )
Add sourceip and target parameters to AttackForm
## Code Before: from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( 'id', ) ## Instruction: Add sourceip and target parameters to AttackForm ## Code After: from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', 'target' )
from django.forms import ModelForm from breach.models import Target, Victim class TargetForm(ModelForm): class Meta: model = Target fields = ( 'name', 'endpoint', 'prefix', 'alphabet', 'secretlength', 'alignmentalphabet', 'recordscardinality', 'method' ) class VictimForm(ModelForm): class Meta: model = Victim fields = ( 'sourceip', ) class AttackForm(ModelForm): class Meta: model = Victim fields = ( - 'id', ? ^ + 'sourceip', ? ++++++ ^ + 'target' )
ef50e82c3c1f49d63d013ac538d932d40c430a46
pyradiator/endpoint.py
pyradiator/endpoint.py
import queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result)
try: import queue except ImportError: import Queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result)
Handle import error on older python versions
Handle import error on older python versions
Python
mit
crashmaster/pyradiator
+ try: - import queue + import queue + except ImportError: + import Queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result)
Handle import error on older python versions
## Code Before: import queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result) ## Instruction: Handle import error on older python versions ## Code After: try: import queue except ImportError: import Queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): 
self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result)
+ try: - import queue + import queue ? ++++ + except ImportError: + import Queue import threading class Producer(object): def __init__(self, period_in_seconds, queue, function): self._period_in_seconds = period_in_seconds self._queue = queue self._function = function self._event = threading.Event() self._thread = threading.Thread(target=self._loop) def start(self): self._thread.start() def stop(self): self._event.set() self._thread.join() def _loop(self): while not self._event.wait(self._period_in_seconds): self.__put_item_into_the_queue() def __put_item_into_the_queue(self): try: self._queue.put(self._function) except queue.Full: pass class Consumer(object): STOP_SENTINEL = "STOP" def __init__(self, queue, function): self._queue = queue self._function = function self._thread = threading.Thread(target=self._loop) self.no_date_from_the_queue = True def start(self): self._thread.start() def stop(self): self._queue.put(self.STOP_SENTINEL) self._thread.join() def _loop(self): for result in iter(self._queue.get, self.STOP_SENTINEL): self.no_date_from_the_queue = result is None if result: self._function(result)
4b7713a1891aa86c0f16fafdea8770495070bfcb
html_snapshots/utils.py
html_snapshots/utils.py
import os import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls
import os import mongoengine as me import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT) def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls
Create mongoengine connection when taking phantom snapshots
Create mongoengine connection when taking phantom snapshots
Python
mit
ccqi/rmc,JGulbronson/rmc,JGulbronson/rmc,UWFlow/rmc,shakilkanji/rmc,ccqi/rmc,UWFlow/rmc,sachdevs/rmc,sachdevs/rmc,MichalKononenko/rmc,UWFlow/rmc,MichalKononenko/rmc,sachdevs/rmc,duaayousif/rmc,MichalKononenko/rmc,MichalKononenko/rmc,JGulbronson/rmc,ccqi/rmc,JGulbronson/rmc,sachdevs/rmc,UWFlow/rmc,UWFlow/rmc,duaayousif/rmc,ccqi/rmc,shakilkanji/rmc,ccqi/rmc,rageandqq/rmc,JGulbronson/rmc,rageandqq/rmc,rageandqq/rmc,MichalKononenko/rmc,shakilkanji/rmc,duaayousif/rmc,shakilkanji/rmc,duaayousif/rmc,rageandqq/rmc,rageandqq/rmc,sachdevs/rmc,duaayousif/rmc,shakilkanji/rmc
import os + + import mongoengine as me import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') + + me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT) def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls
Create mongoengine connection when taking phantom snapshots
## Code Before: import os import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls ## Instruction: Create mongoengine connection when taking phantom snapshots ## Code After: import os import mongoengine as me import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT) def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls
import os + + import mongoengine as me import rmc.shared.constants as c import rmc.models as m FILE_DIR = os.path.dirname(os.path.realpath(__file__)) HTML_DIR = os.path.join(c.SHARED_DATA_DIR, 'html_snapshots') + + me.connect(c.MONGO_DB_RMC, host=c.MONGO_HOST, port=c.MONGO_PORT) def write(file_path, content): ensure_dir(file_path) with open(file_path, 'w') as f: f.write(content) def ensure_dir(file_path): d = os.path.dirname(file_path) if not os.path.exists(d): os.makedirs(d) def generate_urls(): urls = [] # Home page urls.append('') # Course pages for course in m.Course.objects: course_id = course.id urls.append('course/' + course_id) return urls
469688be2069182016b74e9132307755abc7ed77
lutrisweb/settings/local.py
lutrisweb/settings/local.py
from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } STEAM_API_KEY = os.environ['STEAM_API_KEY']
import os from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } STEAM_API_KEY = os.environ.get('STEAM_API_KEY')
Make Steam api key optional
Make Steam api key optional
Python
agpl-3.0
Turupawn/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website,lutris/website,lutris/website,lutris/website
+ import os from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } - STEAM_API_KEY = os.environ['STEAM_API_KEY'] + STEAM_API_KEY = os.environ.get('STEAM_API_KEY')
Make Steam api key optional
## Code Before: from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } STEAM_API_KEY = os.environ['STEAM_API_KEY'] ## Instruction: Make Steam api key optional ## Code After: import os from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } STEAM_API_KEY = os.environ.get('STEAM_API_KEY')
+ import os from base import * # noqa DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'lutris', 'USER': 'lutris', 'PASSWORD': 'admin', 'HOST': 'localhost', } } - STEAM_API_KEY = os.environ['STEAM_API_KEY'] ? ^ ^ + STEAM_API_KEY = os.environ.get('STEAM_API_KEY') ? ^^^^^ ^
98ab2a2ac0279f504195e49d55ff7be817592a75
kirppu/app/checkout/urls.py
kirppu/app/checkout/urls.py
from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] _urls.extend([ url(func.url, func.name, name=func.view_name) for func in AJAX_FUNCTIONS.itervalues() ]) urlpatterns = patterns('kirppu.app.checkout.api', *_urls)
from django.conf import settings from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' if settings.KIRPPU_CHECKOUT_ACTIVE: # Only activate API when checkout is active. _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] _urls.extend([ url(func.url, func.name, name=func.view_name) for func in AJAX_FUNCTIONS.itervalues() ]) else: _urls = [] urlpatterns = patterns('kirppu.app.checkout.api', *_urls)
Fix access to checkout API.
Fix access to checkout API. Prevent access to checkout API urls when checkout is not activated by not creating urlpatterns.
Python
mit
mniemela/kirppu,jlaunonen/kirppu,jlaunonen/kirppu,mniemela/kirppu,jlaunonen/kirppu,mniemela/kirppu,jlaunonen/kirppu
+ from django.conf import settings from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' + if settings.KIRPPU_CHECKOUT_ACTIVE: + # Only activate API when checkout is active. + - _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] + _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] - _urls.extend([ + _urls.extend([ - url(func.url, func.name, name=func.view_name) + url(func.url, func.name, name=func.view_name) - for func in AJAX_FUNCTIONS.itervalues() + for func in AJAX_FUNCTIONS.itervalues() - ]) + ]) + + else: + _urls = [] urlpatterns = patterns('kirppu.app.checkout.api', *_urls)
Fix access to checkout API.
## Code Before: from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] _urls.extend([ url(func.url, func.name, name=func.view_name) for func in AJAX_FUNCTIONS.itervalues() ]) urlpatterns = patterns('kirppu.app.checkout.api', *_urls) ## Instruction: Fix access to checkout API. ## Code After: from django.conf import settings from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' if settings.KIRPPU_CHECKOUT_ACTIVE: # Only activate API when checkout is active. _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] _urls.extend([ url(func.url, func.name, name=func.view_name) for func in AJAX_FUNCTIONS.itervalues() ]) else: _urls = [] urlpatterns = patterns('kirppu.app.checkout.api', *_urls)
+ from django.conf import settings from django.conf.urls import url, patterns from .api import AJAX_FUNCTIONS __author__ = 'jyrkila' + if settings.KIRPPU_CHECKOUT_ACTIVE: + # Only activate API when checkout is active. + - _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] + _urls = [url('^checkout.js$', 'checkout_js', name='checkout_js')] ? ++++ - _urls.extend([ + _urls.extend([ ? ++++ - url(func.url, func.name, name=func.view_name) + url(func.url, func.name, name=func.view_name) ? ++++ - for func in AJAX_FUNCTIONS.itervalues() + for func in AJAX_FUNCTIONS.itervalues() ? ++++ - ]) + ]) + + else: + _urls = [] urlpatterns = patterns('kirppu.app.checkout.api', *_urls)
719777a0b2e3eed4f14355974c6673d20904ac83
models/shopping_item.py
models/shopping_item.py
from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId))
from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) quantity = Column('name', Integer) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId))
Add quantity to shopping item model
Add quantity to shopping item model
Python
mit
jlutz777/FreeStore,jlutz777/FreeStore,jlutz777/FreeStore
from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) + quantity = Column('name', Integer) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId))
Add quantity to shopping item model
## Code Before: from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId)) ## Instruction: Add quantity to shopping item model ## Code After: from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) quantity = Column('name', Integer) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId))
from sqlalchemy import Column, Integer, Unicode, ForeignKey import base class ShoppingItem(base.Base): """Sqlalchemy deals model""" __tablename__ = "shopping_item" catId = 'shopping_category.id' visitId = 'visits.id' id = Column(Integer, primary_key=True) name = Column('name', Unicode) + quantity = Column('name', Integer) category_id = Column('category', Integer, ForeignKey(catId)) visit_id = Column('visit', Integer, ForeignKey(visitId))
27c3972a57e09faf35f86b82b35eb815dadc4688
mediachain/reader/dynamo.py
mediachain/reader/dynamo.py
import boto3 def get_table(name): dynamo = boto3.resource('dynamo') return dynamo.Table(name) def get_object(reference): table = get_table('mediachain') obj = table.get_item(Key={'multihash': reference}) byte_string = obj['Item']['data'] if byte_string is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) return cbor.loads(byte_string)
import boto3 import cbor def get_table(name): dynamo = boto3.resource('dynamodb', endpoint_url='http://localhost:8000', region_name='us-east-1', aws_access_key_id='', aws_secret_access_key='') return dynamo.Table(name) def get_object(reference): table = get_table('Mediachain') obj = table.get_item(Key={'multihash': reference}) if obj is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) byte_string = obj['Item']['data'].value return cbor.loads(byte_string)
Make get_object pull appropriate fields
Make get_object pull appropriate fields Temporarily set up dynamo to work internally
Python
mit
mediachain/mediachain-client,mediachain/mediachain-client
import boto3 + import cbor def get_table(name): - dynamo = boto3.resource('dynamo') + dynamo = boto3.resource('dynamodb', + endpoint_url='http://localhost:8000', + region_name='us-east-1', + aws_access_key_id='', + aws_secret_access_key='') return dynamo.Table(name) def get_object(reference): - table = get_table('mediachain') + table = get_table('Mediachain') obj = table.get_item(Key={'multihash': reference}) - byte_string = obj['Item']['data'] - if byte_string is None: + if obj is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) + byte_string = obj['Item']['data'].value return cbor.loads(byte_string)
Make get_object pull appropriate fields
## Code Before: import boto3 def get_table(name): dynamo = boto3.resource('dynamo') return dynamo.Table(name) def get_object(reference): table = get_table('mediachain') obj = table.get_item(Key={'multihash': reference}) byte_string = obj['Item']['data'] if byte_string is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) return cbor.loads(byte_string) ## Instruction: Make get_object pull appropriate fields ## Code After: import boto3 import cbor def get_table(name): dynamo = boto3.resource('dynamodb', endpoint_url='http://localhost:8000', region_name='us-east-1', aws_access_key_id='', aws_secret_access_key='') return dynamo.Table(name) def get_object(reference): table = get_table('Mediachain') obj = table.get_item(Key={'multihash': reference}) if obj is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) byte_string = obj['Item']['data'].value return cbor.loads(byte_string)
import boto3 + import cbor def get_table(name): - dynamo = boto3.resource('dynamo') ? ^ + dynamo = boto3.resource('dynamodb', ? ++ ^ + endpoint_url='http://localhost:8000', + region_name='us-east-1', + aws_access_key_id='', + aws_secret_access_key='') return dynamo.Table(name) def get_object(reference): - table = get_table('mediachain') ? ^ + table = get_table('Mediachain') ? ^ obj = table.get_item(Key={'multihash': reference}) - byte_string = obj['Item']['data'] - if byte_string is None: + if obj is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) + byte_string = obj['Item']['data'].value return cbor.loads(byte_string)
726c4f14fd5ddd49024163182917aeb9f4af504d
src/wirecloud/core/catalogue_manager.py
src/wirecloud/core/catalogue_manager.py
from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) resource.users.add(user)
from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) resource.publish = True resource.save()
Make published mashups visibles to all users
Make published mashups visibles to all users
Python
agpl-3.0
jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud
from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) + resource.publish = True - resource.users.add(user) + resource.save()
Make published mashups visibles to all users
## Code Before: from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) resource.users.add(user) ## Instruction: Make published mashups visibles to all users ## Code After: from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) resource.publish = True resource.save()
from catalogue.utils import add_resource_from_template from wirecloud.workspace.mashupTemplateGenerator import build_template_from_workspace from wirecloud.markets.utils import MarketManager from wirecloudcommons.utils.template import TemplateParser class WirecloudCatalogueManager(MarketManager): def __init__(self, options): pass def publish_mashup(self, endpoint, published_workspace, user, publish_options, request=None): template = TemplateParser(build_template_from_workspace(publish_options, published_workspace.workspace, user)) resource = add_resource_from_template(published_workspace.get_template_url(request), template, user) + resource.publish = True - resource.users.add(user) ? - ------ ---- + resource.save() ? ++
f5983348940e3acf937c7ddfded73f08d767c5a1
j1a/verilator/setup.py
j1a/verilator/setup.py
from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"], extra_compile_args=['-O2']) ], )
from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/", "/usr/local/share/verilator/include/vltstd/", "/usr/share/verilator/include/vltstd/"], extra_compile_args=['-O2']) ], )
Add vltstd to include path
Add vltstd to include path
Python
bsd-3-clause
jamesbowman/swapforth,zuloloxi/swapforth,uho/swapforth,uho/swapforth,zuloloxi/swapforth,GuzTech/swapforth,jamesbowman/swapforth,jamesbowman/swapforth,GuzTech/swapforth,uho/swapforth,RGD2/swapforth,zuloloxi/swapforth,RGD2/swapforth,GuzTech/swapforth,zuloloxi/swapforth,GuzTech/swapforth,uho/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth
from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], - include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"], + include_dirs=["obj_dir", + "/usr/local/share/verilator/include/", + "/usr/share/verilator/include/", + "/usr/local/share/verilator/include/vltstd/", + "/usr/share/verilator/include/vltstd/"], extra_compile_args=['-O2']) ], )
Add vltstd to include path
## Code Before: from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"], extra_compile_args=['-O2']) ], ) ## Instruction: Add vltstd to include path ## Code After: from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/", "/usr/local/share/verilator/include/vltstd/", "/usr/share/verilator/include/vltstd/"], extra_compile_args=['-O2']) ], )
from distutils.core import setup from distutils.extension import Extension from os import system setup(name='vsimj1a', ext_modules=[ Extension('vsimj1a', ['vsim.cpp'], depends=["obj_dir/Vv3__ALL.a"], extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"], - include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"], + include_dirs=["obj_dir", + "/usr/local/share/verilator/include/", + "/usr/share/verilator/include/", + "/usr/local/share/verilator/include/vltstd/", + "/usr/share/verilator/include/vltstd/"], extra_compile_args=['-O2']) ], )
d1911215a0c7043c5011da55707f6a40938c7d59
alarme/extras/sensor/web/views/home.py
alarme/extras/sensor/web/views/home.py
from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): self.sensor.app.stop() return await self.req() @handle_exception async def post(self): return await self.req()
from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): return await self.req() @handle_exception async def post(self): return await self.req()
Remove debug app exit on / access (web sensor)
Remove debug app exit on / access (web sensor)
Python
mit
insolite/alarme,insolite/alarme,insolite/alarme
from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): - self.sensor.app.stop() return await self.req() @handle_exception async def post(self): return await self.req()
Remove debug app exit on / access (web sensor)
## Code Before: from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): self.sensor.app.stop() return await self.req() @handle_exception async def post(self): return await self.req() ## Instruction: Remove debug app exit on / access (web sensor) ## Code After: from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): return await self.req() @handle_exception async def post(self): return await self.req()
from aiohttp.web import HTTPFound from .core import CoreView from ..util import login_required, handle_exception class Home(CoreView): @login_required async def req(self): return HTTPFound(self.request.app.router.get('control').url()) @handle_exception async def get(self): - self.sensor.app.stop() return await self.req() @handle_exception async def post(self): return await self.req()
b7ce3042c67c17a203590dd78014590626abbc48
fragdev/urls.py
fragdev/urls.py
from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), # Static files: Should be handled by the web server! #url(r'^css/(?P<path>.*)$', 'django.views.static.serve', # {'document_root': '/data/documents/web/fragdev4000/css'}), #url(r'^fonts/(?P<path>.*)$', 'django.views.static.serve', # {'document_root': '/data/documents/web/fragdev4000/fonts'}), )
from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs #url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), )
Remove Debugging Paths, Comment Out Unfinished Portions
Remove Debugging Paths, Comment Out Unfinished Portions
Python
agpl-3.0
lo-windigo/fragdev,lo-windigo/fragdev
from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs - url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), + #url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), - - # Static files: Should be handled by the web server! - #url(r'^css/(?P<path>.*)$', 'django.views.static.serve', - # {'document_root': '/data/documents/web/fragdev4000/css'}), - #url(r'^fonts/(?P<path>.*)$', 'django.views.static.serve', - # {'document_root': '/data/documents/web/fragdev4000/fonts'}), )
Remove Debugging Paths, Comment Out Unfinished Portions
## Code Before: from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), # Static files: Should be handled by the web server! #url(r'^css/(?P<path>.*)$', 'django.views.static.serve', # {'document_root': '/data/documents/web/fragdev4000/css'}), #url(r'^fonts/(?P<path>.*)$', 'django.views.static.serve', # {'document_root': '/data/documents/web/fragdev4000/fonts'}), ) ## Instruction: Remove Debugging Paths, Comment Out Unfinished Portions ## Code After: from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs #url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), )
from django.conf import settings from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Blog URLs - url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), + #url(r'^blog/(?P<path>.*)', include('wiblog.urls', namespace='wiblog')), ? + # Handle all of the "static" pages url(r'^$', 'fragdev.views.home', name='home'), url(r'^about$', 'fragdev.views.about', name='about'), url(r'^contact$', 'fragdev.views.contact', name='contact'), url(r'^contacted$', 'fragdev.views.contacted', name='contacted'), url(r'^projects$', 'fragdev.views.projects', name='projects'), url(r'^resume$', 'fragdev.views.resume', name='resume'), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), - - # Static files: Should be handled by the web server! - #url(r'^css/(?P<path>.*)$', 'django.views.static.serve', - # {'document_root': '/data/documents/web/fragdev4000/css'}), - #url(r'^fonts/(?P<path>.*)$', 'django.views.static.serve', - # {'document_root': '/data/documents/web/fragdev4000/fonts'}), )
a9a55f87abc0a26d41e3fa3091f2f2efad7a2543
autoencoder/encode.py
autoencoder/encode.py
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) assert model.input_shape[1] == size, \ 'Input size of data and pretrained model must be same' encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
Check input dimensions of pretrained model and input file
Check input dimensions of pretrained model and input file
Python
apache-2.0
theislab/dca,theislab/dca,theislab/dca
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) + assert model.input_shape[1] == size, \ + 'Input size of data and pretrained model must be same' + + encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
Check input dimensions of pretrained model and input file
## Code Before: import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir) ## Instruction: Check input dimensions of pretrained model and input file ## Code After: import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) assert model.input_shape[1] == size, \ 'Input size of data and pretrained model must be same' encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) + assert model.input_shape[1] == size, \ + 'Input size of data and pretrained model must be same' + + encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
5293a24bc2ab6a3aa1c9fc98d857c79548509356
explanatory_style.py
explanatory_style.py
import gate class EventAttributionUnit: """event, attribution must be gate.Annotation objects """ def __init__(self, event, attribution): self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: # if type(anntotation) != "Annotation": if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution def get_event_attribution_units(events, attributions): return [ EventAttributionUnit( event, attribution ) for attribution in attributions for event in events if event._id == attribution._caused_event_id ] # def CoPos(): # def CoNeg():
import gate class EventAttributionUnit: def __init__(self, event, attribution): """event, attribution must be gate.Annotation objects """ self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution def get_event_attribution_units(events, attributions): """Given an iterable of events and one of attributions, return a list of EventAttributionUnit objects """ return [ EventAttributionUnit( attribution.get_caused_event(events), attribution ) for attribution in attributions ] # def CoPos(): # def CoNeg(): if __name__ == "__main__": test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml" annotation_file = gate.AnnotationFile(test_file) text_with_nodes = annotation_file._text_with_nodes raw_events = [] raw_attributions = [] annotations = annotation_file.iter_annotations() for annotation in annotations: if "event" in annotation._type.lower(): raw_events.append(annotation) elif "attribution" in annotation._type.lower(): raw_attributions.append(annotation) events = gate.concatenate_annotations(raw_events) attributions = gate.concatenate_annotations(raw_attributions) event_attribution_units = get_event_attribution_units( events, attributions ) for x in event_attribution_units: print( x.get_event().get_concatenated_text(text_with_nodes, " "), x.get_attribution().get_concatenated_text(text_with_nodes, " ") )
Add __main__ program for running on files
Add __main__ program for running on files
Python
mit
nickwbarber/HILT-annotations
import gate class EventAttributionUnit: - """event, attribution must be gate.Annotation objects - """ def __init__(self, event, attribution): + """event, attribution must be gate.Annotation objects + """ self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: - # if type(anntotation) != "Annotation": if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution - def get_event_attribution_units(events, attributions): + def get_event_attribution_units(events, + attributions): + """Given an iterable of events and one of attributions, return a list of + EventAttributionUnit objects + """ return [ - EventAttributionUnit( event, attribution ) + EventAttributionUnit( + attribution.get_caused_event(events), + attribution + ) for attribution in attributions - for event in events - if event._id == attribution._caused_event_id ] # def CoPos(): # def CoNeg(): + if __name__ == "__main__": + + test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml" + + + annotation_file = gate.AnnotationFile(test_file) + text_with_nodes = annotation_file._text_with_nodes + + raw_events = [] + raw_attributions = [] + annotations = annotation_file.iter_annotations() + for annotation in annotations: + if "event" in annotation._type.lower(): + raw_events.append(annotation) + elif "attribution" in annotation._type.lower(): + raw_attributions.append(annotation) + + events = gate.concatenate_annotations(raw_events) + attributions = gate.concatenate_annotations(raw_attributions) + + event_attribution_units = get_event_attribution_units( + events, + attributions + ) + + for x in event_attribution_units: + print( + x.get_event().get_concatenated_text(text_with_nodes, " "), + x.get_attribution().get_concatenated_text(text_with_nodes, " ") + ) +
Add __main__ program for running on files
## Code Before: import gate class EventAttributionUnit: """event, attribution must be gate.Annotation objects """ def __init__(self, event, attribution): self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: # if type(anntotation) != "Annotation": if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution def get_event_attribution_units(events, attributions): return [ EventAttributionUnit( event, attribution ) for attribution in attributions for event in events if event._id == attribution._caused_event_id ] # def CoPos(): # def CoNeg(): ## Instruction: Add __main__ program for running on files ## Code After: import gate class EventAttributionUnit: def __init__(self, event, attribution): """event, attribution must be gate.Annotation objects """ self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution def get_event_attribution_units(events, attributions): """Given an iterable of events and one of attributions, return a list of EventAttributionUnit objects """ return [ EventAttributionUnit( attribution.get_caused_event(events), attribution ) for attribution in attributions ] # def CoPos(): # def CoNeg(): if __name__ == "__main__": test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml" annotation_file = gate.AnnotationFile(test_file) text_with_nodes = annotation_file._text_with_nodes raw_events = [] raw_attributions = [] annotations = annotation_file.iter_annotations() for annotation in annotations: if "event" in annotation._type.lower(): raw_events.append(annotation) elif "attribution" in annotation._type.lower(): 
raw_attributions.append(annotation) events = gate.concatenate_annotations(raw_events) attributions = gate.concatenate_annotations(raw_attributions) event_attribution_units = get_event_attribution_units( events, attributions ) for x in event_attribution_units: print( x.get_event().get_concatenated_text(text_with_nodes, " "), x.get_attribution().get_concatenated_text(text_with_nodes, " ") )
import gate class EventAttributionUnit: - """event, attribution must be gate.Annotation objects - """ def __init__(self, event, attribution): + """event, attribution must be gate.Annotation objects + """ self._event = event self._attribution = attribution for annotation in [self._event, self._attribution]: - # if type(anntotation) != "Annotation": if not isinstance(annotation, gate.Annotation): raise TypeError("Not a gate.Annotation object!") def get_event(self): return self._event def get_attribution(self): return self._attribution - def get_event_attribution_units(events, attributions): ? --------------- + def get_event_attribution_units(events, + attributions): + """Given an iterable of events and one of attributions, return a list of + EventAttributionUnit objects + """ return [ - EventAttributionUnit( event, attribution ) + EventAttributionUnit( + attribution.get_caused_event(events), + attribution + ) for attribution in attributions - for event in events - if event._id == attribution._caused_event_id ] # def CoPos(): # def CoNeg(): + + if __name__ == "__main__": + + test_file = "/home/nick/hilt/pes/conversations/16/4-MG-2014-06-02_PES_3_consensus.xml" + + + annotation_file = gate.AnnotationFile(test_file) + text_with_nodes = annotation_file._text_with_nodes + + raw_events = [] + raw_attributions = [] + annotations = annotation_file.iter_annotations() + for annotation in annotations: + if "event" in annotation._type.lower(): + raw_events.append(annotation) + elif "attribution" in annotation._type.lower(): + raw_attributions.append(annotation) + + events = gate.concatenate_annotations(raw_events) + attributions = gate.concatenate_annotations(raw_attributions) + + event_attribution_units = get_event_attribution_units( + events, + attributions + ) + + for x in event_attribution_units: + print( + x.get_event().get_concatenated_text(text_with_nodes, " "), + x.get_attribution().get_concatenated_text(text_with_nodes, " ") + )
73feddb22ad3a2543ad4f8047061d909c64fd75d
server/system/CommonMySQL.py
server/system/CommonMySQL.py
from system.DBMysql import connect from system.ConfigLoader import getCfg def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] except db.Error as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent
from system.DBMysql import connect from system.ConfigLoader import getCfg import logging def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') con = None try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] except Exception as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent
Remove some bug on mysql elements.
Remove some bug on mysql elements.
Python
mit
Deisss/webservice-notification,Deisss/webservice-notification
from system.DBMysql import connect from system.ConfigLoader import getCfg + import logging def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') + con = None try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] - except db.Error as e: + except Exception as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent
Remove some bug on mysql elements.
## Code Before: from system.DBMysql import connect from system.ConfigLoader import getCfg def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] except db.Error as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent ## Instruction: Remove some bug on mysql elements. ## Code After: from system.DBMysql import connect from system.ConfigLoader import getCfg import logging def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') con = None try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] except Exception as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent
from system.DBMysql import connect from system.ConfigLoader import getCfg + import logging def loginUser(login, password): ''' Try to login a user regarding login/password ''' userContent = None table = getCfg('MYSQL', 'table') tableId = getCfg('MYSQL', 'idField') tableLogin = getCfg('MYSQL', 'loginField') tablePassword = getCfg('MYSQL', 'passwordField') + con = None try: # Starting con = connect() cur = con.cursor() cur.execute( 'SELECT ' + tableId + ' FROM ' + table + ' WHERE ' + tableLogin + '=%s AND ' + tablePassword + '=%s', ( login, password ) ) userContent = cur.fetchone() if userContent is not None: userContent = userContent[0] - except db.Error as e: + except Exception as e: logging.error('loginUser: Error from MySQL => %s' % e) finally: if con: con.close() return userContent
a9cc67b9defeffc76091bd204f230a431db80196
traftrack/image.py
traftrack/image.py
import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() return colors[1][0], colors[2][0], colors[3][0]
import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() r = next((c[0] for c in colors if c[1] == 1), 0) y = next((c[0] for c in colors if c[1] == 2), 0) g = next((c[0] for c in colors if c[1] == 3), 0) return r, y, g
Fix issue with non-existing color in compute_histo_RYG
Fix issue with non-existing color in compute_histo_RYG
Python
mit
asavonic/traftrack
import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() - return colors[1][0], colors[2][0], colors[3][0] + r = next((c[0] for c in colors if c[1] == 1), 0) + y = next((c[0] for c in colors if c[1] == 2), 0) + g = next((c[0] for c in colors if c[1] == 3), 0) + return r, y, g +
Fix issue with non-existing color in compute_histo_RYG
## Code Before: import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() return colors[1][0], colors[2][0], colors[3][0] ## Instruction: Fix issue with non-existing color in compute_histo_RYG ## Code After: import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() r = next((c[0] for c in colors if c[1] == 1), 0) y = next((c[0] for c in colors if c[1] == 2), 0) g = next((c[0] for c in colors if c[1] == 3), 0) return r, y, g
import PIL.Image import PIL.ImageMath import urllib.request from io import BytesIO def load_img_url(url): req = urllib.request.urlopen(url) data = BytesIO(req.read()) return PIL.Image.open(data) def load_img_file(fname): return PIL.Image.open(fname) def compute_histo_RYG(img, mask): img = img.convert(mode='RGB') mask = mask.convert(mode='1') black = PIL.Image.new('RGB', mask.size, color=(0, 0, 0, 0)) masked = PIL.Image.composite(img, black, mask) palette = PIL.Image.new('P', (1, 1)) palette.putpalette( [0, 0, 0, # black 255, 0, 0, # red 255, 255, 0, # yellow 0, 255, 0]) # green quantized = masked.quantize(palette=palette) colors = quantized.getcolors() - return colors[1][0], colors[2][0], colors[3][0] + r = next((c[0] for c in colors if c[1] == 1), 0) + y = next((c[0] for c in colors if c[1] == 2), 0) + g = next((c[0] for c in colors if c[1] == 3), 0) + + return r, y, g
9e6621ac7e4f07b9272ddb144aebbb75826d2405
src/flock.py
src/flock.py
import cherrypy from jinja2 import Environment, FileSystemLoader j2_env = Environment(loader = FileSystemLoader('templates')) class Root(object): @cherrypy.expose def index(self): template = j2_env.get_template('base.html') return template.render() cherrypy.config.update('app.config') cherrypy.tree.mount(Root(), '/', 'app.config') cherrypy.engine.start() cherrypy.engine.block()
from flask import Flask, redirect, render_template, request, session, url_for from flask_oauthlib.client import OAuth, OAuthException app = Flask(__name__) app.config['FACEBOKK_APP_ID'] = '' app.config['FACEBOOK_APP_SECRET'] = '' app.config['GOOGLE_APP_ID'] = '' app.config['GOOGLE_APP_SECRET'] = '' app.secret_key = 'development' oauth = OAuth(app) facebook = oauth.remote_app( 'facebook', consumer_key = app.config.get('FACEBOOK_APP_ID'), consumer_secret = app.config.get('FACEBOOK_APP_SECRET'), request_token_params = {'scope' : 'email'}, base_url = 'https://graph.facebook.com', request_token_url = None, access_token_url = '/oauth/access_token', authorize_url = 'https://www.facebook.com/dialog/oauth' ) google = oauth.remote_app( 'google', consumer_key = app.config.get('GOOGLE_APP_ID'), consumer_secret = app.config.get('GOOGLE_APP_SECRET'), request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'}, base_url = 'https://www.googleapis.com/oauth2/v1/', request_token_url = None, access_token_url = 'https://accounts.google.com/o/auth2/token', authorize_url = 'https://accounts.google.com/o/oauth2/auth' ) @app.route('/') def hello_world(): return render_template('base.html') @facebook.togengetter def get_facebook_oauth_token(): return session.get('facebook_token') @google.tokengetter def get_google_oauth_token(): return session.get('google_token') if __name__ == '__main__': app.run()
Switch to Flask, add oauth
Switch to Flask, add oauth
Python
agpl-3.0
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
- import cherrypy - from jinja2 import Environment, FileSystemLoader + from flask import Flask, redirect, render_template, request, session, url_for + from flask_oauthlib.client import OAuth, OAuthException - j2_env = Environment(loader = FileSystemLoader('templates')) + app = Flask(__name__) + app.config['FACEBOKK_APP_ID'] = '' + app.config['FACEBOOK_APP_SECRET'] = '' + app.config['GOOGLE_APP_ID'] = '' + app.config['GOOGLE_APP_SECRET'] = '' + app.secret_key = 'development' + oauth = OAuth(app) - class Root(object): - @cherrypy.expose - def index(self): - template = j2_env.get_template('base.html') - return template.render() + facebook = oauth.remote_app( + 'facebook', + consumer_key = app.config.get('FACEBOOK_APP_ID'), + consumer_secret = app.config.get('FACEBOOK_APP_SECRET'), + request_token_params = {'scope' : 'email'}, + base_url = 'https://graph.facebook.com', + request_token_url = None, + access_token_url = '/oauth/access_token', + authorize_url = 'https://www.facebook.com/dialog/oauth' + ) - cherrypy.config.update('app.config') - cherrypy.tree.mount(Root(), '/', 'app.config') - cherrypy.engine.start() - cherrypy.engine.block() + google = oauth.remote_app( + 'google', + consumer_key = app.config.get('GOOGLE_APP_ID'), + consumer_secret = app.config.get('GOOGLE_APP_SECRET'), + request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'}, + base_url = 'https://www.googleapis.com/oauth2/v1/', + request_token_url = None, + access_token_url = 'https://accounts.google.com/o/auth2/token', + authorize_url = 'https://accounts.google.com/o/oauth2/auth' + ) + @app.route('/') + def hello_world(): + return render_template('base.html') + + @facebook.togengetter + def get_facebook_oauth_token(): + return session.get('facebook_token') + + @google.tokengetter + def get_google_oauth_token(): + return session.get('google_token') + + if __name__ == '__main__': + app.run() + +
Switch to Flask, add oauth
## Code Before: import cherrypy from jinja2 import Environment, FileSystemLoader j2_env = Environment(loader = FileSystemLoader('templates')) class Root(object): @cherrypy.expose def index(self): template = j2_env.get_template('base.html') return template.render() cherrypy.config.update('app.config') cherrypy.tree.mount(Root(), '/', 'app.config') cherrypy.engine.start() cherrypy.engine.block() ## Instruction: Switch to Flask, add oauth ## Code After: from flask import Flask, redirect, render_template, request, session, url_for from flask_oauthlib.client import OAuth, OAuthException app = Flask(__name__) app.config['FACEBOKK_APP_ID'] = '' app.config['FACEBOOK_APP_SECRET'] = '' app.config['GOOGLE_APP_ID'] = '' app.config['GOOGLE_APP_SECRET'] = '' app.secret_key = 'development' oauth = OAuth(app) facebook = oauth.remote_app( 'facebook', consumer_key = app.config.get('FACEBOOK_APP_ID'), consumer_secret = app.config.get('FACEBOOK_APP_SECRET'), request_token_params = {'scope' : 'email'}, base_url = 'https://graph.facebook.com', request_token_url = None, access_token_url = '/oauth/access_token', authorize_url = 'https://www.facebook.com/dialog/oauth' ) google = oauth.remote_app( 'google', consumer_key = app.config.get('GOOGLE_APP_ID'), consumer_secret = app.config.get('GOOGLE_APP_SECRET'), request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'}, base_url = 'https://www.googleapis.com/oauth2/v1/', request_token_url = None, access_token_url = 'https://accounts.google.com/o/auth2/token', authorize_url = 'https://accounts.google.com/o/oauth2/auth' ) @app.route('/') def hello_world(): return render_template('base.html') @facebook.togengetter def get_facebook_oauth_token(): return session.get('facebook_token') @google.tokengetter def get_google_oauth_token(): return session.get('google_token') if __name__ == '__main__': app.run()
- import cherrypy - from jinja2 import Environment, FileSystemLoader + from flask import Flask, redirect, render_template, request, session, url_for + from flask_oauthlib.client import OAuth, OAuthException - j2_env = Environment(loader = FileSystemLoader('templates')) + app = Flask(__name__) + app.config['FACEBOKK_APP_ID'] = '' + app.config['FACEBOOK_APP_SECRET'] = '' + app.config['GOOGLE_APP_ID'] = '' + app.config['GOOGLE_APP_SECRET'] = '' + app.secret_key = 'development' + oauth = OAuth(app) - class Root(object): - @cherrypy.expose - def index(self): - template = j2_env.get_template('base.html') - return template.render() + facebook = oauth.remote_app( + 'facebook', + consumer_key = app.config.get('FACEBOOK_APP_ID'), + consumer_secret = app.config.get('FACEBOOK_APP_SECRET'), + request_token_params = {'scope' : 'email'}, + base_url = 'https://graph.facebook.com', + request_token_url = None, + access_token_url = '/oauth/access_token', + authorize_url = 'https://www.facebook.com/dialog/oauth' + ) - cherrypy.config.update('app.config') - cherrypy.tree.mount(Root(), '/', 'app.config') - cherrypy.engine.start() - cherrypy.engine.block() + google = oauth.remote_app( + 'google', + consumer_key = app.config.get('GOOGLE_APP_ID'), + consumer_secret = app.config.get('GOOGLE_APP_SECRET'), + request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'}, + base_url = 'https://www.googleapis.com/oauth2/v1/', + request_token_url = None, + access_token_url = 'https://accounts.google.com/o/auth2/token', + authorize_url = 'https://accounts.google.com/o/oauth2/auth' + ) + + @app.route('/') + def hello_world(): + return render_template('base.html') + + @facebook.togengetter + def get_facebook_oauth_token(): + return session.get('facebook_token') + + @google.tokengetter + def get_google_oauth_token(): + return session.get('google_token') + + if __name__ == '__main__': + app.run() +
7e87a91f48ef9d5a031033991ce68c2596193f01
tests/test_pipe.py
tests/test_pipe.py
from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b''
import pytest from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b'' def test_iter(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() riter = iter(p.side_out) data = next(riter) assert data == b'Hello' with pytest.raises(StopIteration): next(riter) def test_iter_eof(): p = Pipe() riter = iter(p.side_out) p.side_in.write(b"Hello\n") data = next(riter) assert data == b'Hello\n' p.side_in.close() with pytest.raises(StopIteration): next(riter)
Add iteration tests on pipes
Add iteration tests on pipes
Python
bsd-3-clause
xonsh/slug
+ import pytest from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b'' + + def test_iter(): + p = Pipe() + p.side_in.write(b"Hello") + p.side_in.close() + + riter = iter(p.side_out) + + data = next(riter) + assert data == b'Hello' + + with pytest.raises(StopIteration): + next(riter) + + + def test_iter_eof(): + p = Pipe() + riter = iter(p.side_out) + + p.side_in.write(b"Hello\n") + + data = next(riter) + assert data == b'Hello\n' + + p.side_in.close() + + with pytest.raises(StopIteration): + next(riter) +
Add iteration tests on pipes
## Code Before: from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b'' ## Instruction: Add iteration tests on pipes ## Code After: import pytest from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b'' def test_iter(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() riter = iter(p.side_out) data = next(riter) assert data == b'Hello' with pytest.raises(StopIteration): next(riter) def test_iter_eof(): p = Pipe() riter = iter(p.side_out) p.side_in.write(b"Hello\n") data = next(riter) assert data == b'Hello\n' p.side_in.close() with pytest.raises(StopIteration): next(riter)
+ import pytest from slug import Pipe def test_goesthrough(): p = Pipe() p.side_in.write(b"Hello") p.side_in.close() data = p.side_out.read() assert data == b'Hello' def test_eof(): p = Pipe() p.side_in.write(b"spam") data = p.side_out.read() assert data == b'spam' p.side_in.close() data = p.side_out.read() assert data == b'' + + + def test_iter(): + p = Pipe() + p.side_in.write(b"Hello") + p.side_in.close() + + riter = iter(p.side_out) + + data = next(riter) + assert data == b'Hello' + + with pytest.raises(StopIteration): + next(riter) + + + def test_iter_eof(): + p = Pipe() + riter = iter(p.side_out) + + p.side_in.write(b"Hello\n") + + data = next(riter) + assert data == b'Hello\n' + + p.side_in.close() + + with pytest.raises(StopIteration): + next(riter)
6d13b3b041e3e6cd6089814ad3276a905aa10bc3
troposphere/fms.py
troposphere/fms.py
from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), }
from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), 'ORGUNIT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), }
Update AWS::FMS::Policy per 2020-06-18 changes
Update AWS::FMS::Policy per 2020-06-18 changes
Python
bsd-2-clause
cloudtools/troposphere,cloudtools/troposphere
from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), + 'ORGUNIT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), }
Update AWS::FMS::Policy per 2020-06-18 changes
## Code Before: from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), } ## Instruction: Update AWS::FMS::Policy per 2020-06-18 changes ## Code After: from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), 'ORGUNIT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), }
from . import AWSProperty, AWSObject, Tags from .validators import json_checker, boolean class IEMap(AWSProperty): props = { 'ACCOUNT': ([basestring], False), + 'ORGUNIT': ([basestring], False), } class Policy(AWSObject): resource_type = "AWS::FMS::Policy" props = { 'DeleteAllPolicyResources': (boolean, False), 'ExcludeMap': (IEMap, False), 'ExcludeResourceTags': (boolean, True), 'IncludeMap': (IEMap, False), 'PolicyName': (basestring, True), 'RemediationEnabled': (boolean, True), 'ResourceTags': (Tags, False), 'ResourceType': (basestring, True), 'ResourceTypeList': ([basestring], True), 'SecurityServicePolicyData': (json_checker, True), 'Tags': (Tags, False), } class NotificationChannel(AWSObject): resource_type = "AWS::FMS::NotificationChannel" props = { 'SnsRoleName': (basestring, True), 'SnsTopicArn': (basestring, True), }
70a97ab38d2b30652c41d1e058ef4447fdd54863
test_settings.py
test_settings.py
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 USE_TZ = True
Fix Django 5.0 deprecation warning.
Fix Django 5.0 deprecation warning.
Python
mit
adamcharnock/django-tz-detect,adamcharnock/django-tz-detect,adamcharnock/django-tz-detect
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 + USE_TZ = True +
Fix Django 5.0 deprecation warning.
## Code Before: import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 ## Instruction: Fix Django 5.0 deprecation warning. ## Code After: import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 USE_TZ = True
import os SECRET_KEY = "h_ekayhzss(0lzsacd5cat7d=pu#51sh3w&uqn&#3#tz26vuq4" DATABASES = {"default": {"ENGINE": "django.db.backends.sqlite3", "NAME": ":memory:"}} INSTALLED_APPS = [ "django.contrib.sites", "django.contrib.sessions", "django.contrib.contenttypes", "tz_detect", ] MIDDLEWARE_CLASSES = [ "django.contrib.sessions.middleware.SessionMiddleware", "django.middleware.common.CommonMiddleware", "tz_detect.middleware.TimezoneMiddleware", ] MIDDLEWARE = MIDDLEWARE_CLASSES SITE_ID = 1 + + USE_TZ = True
8c1cc6895f5f8772d2b09a9efab7395b0a6b39ba
wake/filters.py
wake/filters.py
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text))
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
Update Markdown filter to recognize metadata.
Update Markdown filter to recognize metadata.
Python
bsd-3-clause
chromakode/wake
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): - return Markup(markdown.markdown(text)) + md = markdown.Markdown(extensions=['meta']) + return Markup(md.convert(text))
Update Markdown filter to recognize metadata.
## Code Before: import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): return Markup(markdown.markdown(text)) ## Instruction: Update Markdown filter to recognize metadata. ## Code After: import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): md = markdown.Markdown(extensions=['meta']) return Markup(md.convert(text))
import markdown from datetime import datetime from twitter_text import TwitterText from flask import Markup def relative_time(timestamp): delta = (datetime.now() - datetime.fromtimestamp(timestamp)) delta_s = delta.days * 86400 + delta.seconds if delta_s < 60: return "less than a minute ago" elif delta_s < 120: return "about a minute ago" elif delta_s < (60 * 60): return str(delta_s / 60) + " minutes ago" elif delta_s < (120 * 60): return "about an hour ago" elif delta_s < (24 * 60 * 60): return "about " + str(delta_s / 3600) + " hours ago" elif delta_s < (48 * 60 * 60): return "1 day ago" else: return str(delta_s / 86400) + " days ago" def markup_tweet(text): return Markup(TwitterText(text).autolink.auto_link()) def markup_markdown(text): - return Markup(markdown.markdown(text)) + md = markdown.Markdown(extensions=['meta']) + return Markup(md.convert(text))
88a6708061ccdc7d3ac4d031c48de44039937b54
frontends/etiquette_flask/etiquette_flask_entrypoint.py
frontends/etiquette_flask/etiquette_flask_entrypoint.py
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.contrib.fixers import backend backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) site = backend.site
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.middleware.proxy_fix import backend backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
Replace werkzeug.contrib with werkzeug.middleware proxyfix. werkzeug.contrib has been deprecated, this is the new location of the proxyfix.
Python
bsd-3-clause
voussoir/etiquette,voussoir/etiquette,voussoir/etiquette
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' - import werkzeug.contrib.fixers + import werkzeug.middleware.proxy_fix import backend - backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) + backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
## Code Before: ''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.contrib.fixers import backend backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) site = backend.site ## Instruction: Replace werkzeug.contrib with werkzeug.middleware proxyfix. ## Code After: ''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' import werkzeug.middleware.proxy_fix import backend backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) site = backend.site
''' This file is the WSGI entrypoint for remote / production use. If you are using Gunicorn, for example: gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-" ''' - import werkzeug.contrib.fixers + import werkzeug.middleware.proxy_fix import backend - backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app) ? ^ ^^^^^^ --- + backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app) ? ^^^^^^^^^^^^^ ^^^ site = backend.site
9ae5ea3876fae6ef0bc092d87c71d9ea86040cf7
InvenTree/company/api.py
InvenTree/company/api.py
from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' company_api_urls = [ url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' class CompanyDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Company.objects.all() serializer_class = CompanySerializer permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] company_api_urls = [ url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'), url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
Add RUD endpoint for Company
Add RUD endpoint for Company
Python
mit
SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' + class CompanyDetail(generics.RetrieveUpdateDestroyAPIView): + + queryset = Company.objects.all() + serializer_class = CompanySerializer + + permission_classes = [ + permissions.IsAuthenticatedOrReadOnly, + ] + + company_api_urls = [ + + url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'), url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
Add RUD endpoint for Company
## Code Before: from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' company_api_urls = [ url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ] ## Instruction: Add RUD endpoint for Company ## Code After: from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' class CompanyDetail(generics.RetrieveUpdateDestroyAPIView): queryset = Company.objects.all() serializer_class = CompanySerializer permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] company_api_urls = [ url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'), url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
from __future__ import unicode_literals from django_filters.rest_framework import DjangoFilterBackend from rest_framework import filters from rest_framework import generics, permissions from django.conf.urls import url from .models import Company from .serializers import CompanySerializer class CompanyList(generics.ListCreateAPIView): serializer_class = CompanySerializer queryset = Company.objects.all() permission_classes = [ permissions.IsAuthenticatedOrReadOnly, ] filter_backends = [ DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter, ] filter_fields = [ 'name', 'is_customer', 'is_supplier', ] search_fields = [ 'name', 'description', ] ordering_fields = [ 'name', ] ordering = 'name' + class CompanyDetail(generics.RetrieveUpdateDestroyAPIView): + + queryset = Company.objects.all() + serializer_class = CompanySerializer + + permission_classes = [ + permissions.IsAuthenticatedOrReadOnly, + ] + + company_api_urls = [ + + url(r'^(?P<pk>\d+)/?', CompanyDetail.as_view(), name='api-company-detail'), url(r'^.*$', CompanyList.as_view(), name='api-company-list'), ]
c2973d4f2ae7da0f75f573cebd8eb1780d5b33e1
account_withholding_automatic/models/account_payment_group.py
account_withholding_automatic/models/account_payment_group.py
from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue self.env['account.tax'].search([ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res
from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue # limpiamos el type por si se paga desde factura ya que el en ese # caso viene in_invoice o out_invoice y en search de tax filtrar # por impuestos de venta y compra (y no los nuestros de pagos # y cobros) self.env['account.tax'].with_context(type=None).search([ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res
FIX withholdings computation when payment come from invoices
FIX withholdings computation when payment come from invoices
Python
agpl-3.0
ingadhoc/account-payment
from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue + # limpiamos el type por si se paga desde factura ya que el en ese + # caso viene in_invoice o out_invoice y en search de tax filtrar + # por impuestos de venta y compra (y no los nuestros de pagos + # y cobros) - self.env['account.tax'].search([ + self.env['account.tax'].with_context(type=None).search([ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res
FIX withholdings computation when payment come from invoices
## Code Before: from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue self.env['account.tax'].search([ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res ## Instruction: FIX withholdings computation when payment come from invoices ## Code After: from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue # limpiamos el type por si se paga desde factura ya que el en ese # caso viene in_invoice o out_invoice y en search de tax filtrar # por impuestos de venta y compra (y no los nuestros de pagos # y cobros) self.env['account.tax'].with_context(type=None).search([ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in 
self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res
from openerp import models, api, fields class AccountPaymentGroup(models.Model): _inherit = "account.payment.group" withholdings_amount = fields.Monetary( compute='_compute_withholdings_amount' ) @api.multi @api.depends( 'payment_ids.tax_withholding_id', 'payment_ids.amount', ) def _compute_withholdings_amount(self): for rec in self: rec.withholdings_amount = sum( rec.payment_ids.filtered( lambda x: x.tax_withholding_id).mapped('amount')) @api.multi def compute_withholdings(self): for rec in self: if rec.partner_type != 'supplier': continue + # limpiamos el type por si se paga desde factura ya que el en ese + # caso viene in_invoice o out_invoice y en search de tax filtrar + # por impuestos de venta y compra (y no los nuestros de pagos + # y cobros) - self.env['account.tax'].search([ + self.env['account.tax'].with_context(type=None).search([ ? ++++++++++++++++++++++++ ('type_tax_use', '=', rec.partner_type), ('company_id', '=', rec.company_id.id), ]).create_payment_withholdings(rec) @api.multi def confirm(self): res = super(AccountPaymentGroup, self).confirm() for rec in self: if rec.company_id.automatic_withholdings: rec.compute_withholdings() return res
6dd1881fc2631602d7e34aede208abf42ed688aa
renderMenu.py
renderMenu.py
import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone from flask import Flask, render_template, url_for from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone from flask import Flask, render_template, url_for, jsonify from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) @app.route('/foodmenu') def foodmenu(): foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] return jsonify(menu) @app.route('/foodservices') def foodservices(): serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return jsonify(locations) if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
Add API endpoints to serve data in JSON format.
Add API endpoints to serve data in JSON format.
Python
mit
alykhank/FoodMenu,alykhank/FoodMenu,alykhank/FoodMenu
import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone - from flask import Flask, render_template, url_for + from flask import Flask, render_template, url_for, jsonify from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) + @app.route('/foodmenu') + def foodmenu(): + foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result + menu = json.loads(foodMenu)['response']['data'] + return jsonify(menu) + + @app.route('/foodservices') + def foodservices(): + serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result + locations = json.loads(serviceInfo)['response']['data'] + return jsonify(locations) + if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
Add API endpoints to serve data in JSON format.
## Code Before: import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone from flask import Flask, render_template, url_for from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port) ## Instruction: Add API endpoints to serve data in JSON format. ## Code After: import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone from flask import Flask, render_template, url_for, jsonify from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) @app.route('/foodmenu') def foodmenu(): foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] return jsonify(menu) @app.route('/foodservices') def foodservices(): serviceInfo = 
FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return jsonify(locations) if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
import json, os, requests from awsauth import S3Auth from datetime import datetime from pytz import timezone - from flask import Flask, render_template, url_for + from flask import Flask, render_template, url_for, jsonify ? +++++++++ from models import app, db, FoodMenu, FoodServices MIXPANEL_TOKEN = os.environ.get('MIXPANEL_TOKEN') @app.route('/') def renderMenu(): nowWaterloo = datetime.now(timezone('America/Toronto')) foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result menu = json.loads(foodMenu)['response']['data'] serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result locations = json.loads(serviceInfo)['response']['data'] return render_template('index.html', menu=menu, locations=locations, nowWaterloo=nowWaterloo, mixpanelToken=MIXPANEL_TOKEN) + @app.route('/foodmenu') + def foodmenu(): + foodMenu = FoodMenu.query.order_by(FoodMenu.id.desc()).first().result + menu = json.loads(foodMenu)['response']['data'] + return jsonify(menu) + + @app.route('/foodservices') + def foodservices(): + serviceInfo = FoodServices.query.order_by(FoodServices.id.desc()).first().result + locations = json.loads(serviceInfo)['response']['data'] + return jsonify(locations) + if __name__ == "__main__": # Bind to PORT if defined, otherwise default to 5000. port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
94c0c60172c1114d6f0938de88af67ae7203ae95
pi_setup/system.py
pi_setup/system.py
import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main()
import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) subprocess.call(["apt-get", "-y", "install", "ipython-notebook"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main()
Add python to install script
Add python to install script
Python
mit
projectweekend/Pi-Setup,projectweekend/Pi-Setup
import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) + subprocess.call(["apt-get", "-y", "install", "ipython-notebook"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main()
Add python to install script
## Code Before: import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main() ## Instruction: Add python to install script ## Code After: import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) subprocess.call(["apt-get", "-y", "install", "ipython-notebook"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main()
import subprocess def main(): subprocess.call(["apt-get", "update"]) subprocess.call(["apt-get", "-y", "upgrade"]) subprocess.call(["apt-get", "-y", "install", "python-dev"]) subprocess.call(["apt-get", "-y", "install", "python-pip"]) + subprocess.call(["apt-get", "-y", "install", "ipython-notebook"]) subprocess.call(["apt-get", "-y", "install", "avahi-daemon"]) subprocess.call(["apt-get", "-y", "install", "rpi-update"]) subprocess.call(["pip", "install", "virtualenv"]) if __name__ == '__main__': main()
98dd8df628079357b26a663d24adcbc6ac4d3794
indra/__init__.py
indra/__init__.py
from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL)
from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' __all__ = ['bel', 'biopax', 'trips', 'reach', 'index_cards', 'sparser', 'databases', 'literature', 'preassembler', 'assemblers', 'mechlinker', 'belief', 'tools', 'util'] ''' ############# # For now these imports are disabled because # (1) Every import would load everything in INDRA which is time consuming and # (2) Optional dependencies in some modules will try to be loaded even if # they are not intended to be used ################## # Core import statements # Input processors from indra import bel from indra import biopax from indra import trips from indra import reach from indra import index_cards # Clients from indra import databases from indra import literature # Assemblers from indra import preassembler from indra import assemblers from indra import mechlinker from indra import belief # Tools and utils from indra import tools from indra import util ''' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL)
Add commented out top-level imports
Add commented out top-level imports
Python
bsd-2-clause
pvtodorov/indra,sorgerlab/belpy,jmuhlich/indra,johnbachman/belpy,jmuhlich/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,johnbachman/indra,jmuhlich/indra,sorgerlab/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra
from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' + + __all__ = ['bel', 'biopax', 'trips', 'reach', 'index_cards', 'sparser', + 'databases', 'literature', + 'preassembler', 'assemblers', 'mechlinker', 'belief', + 'tools', 'util'] + ''' + ############# + # For now these imports are disabled because + # (1) Every import would load everything in INDRA which is time consuming and + # (2) Optional dependencies in some modules will try to be loaded even if + # they are not intended to be used + ################## + # Core + import statements + # Input processors + from indra import bel + from indra import biopax + from indra import trips + from indra import reach + from indra import index_cards + # Clients + from indra import databases + from indra import literature + # Assemblers + from indra import preassembler + from indra import assemblers + from indra import mechlinker + from indra import belief + # Tools and utils + from indra import tools + from indra import util + ''' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL)
Add commented out top-level imports
## Code Before: from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL) ## Instruction: Add commented out top-level imports ## Code After: from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' __all__ = ['bel', 'biopax', 'trips', 'reach', 'index_cards', 'sparser', 'databases', 'literature', 'preassembler', 'assemblers', 'mechlinker', 'belief', 'tools', 'util'] ''' ############# # For now these imports are disabled because # (1) Every import would load everything in INDRA which is time consuming and # (2) Optional dependencies in some modules will try to be loaded even if # they are not intended to be used ################## # Core import statements # Input processors from indra import bel from indra import biopax from indra import trips from indra import reach from indra import index_cards # Clients from indra import databases from indra import literature # Assemblers from indra import preassembler from indra import assemblers from indra import mechlinker from indra import belief # Tools and utils from indra import tools from indra import util ''' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL)
from __future__ import print_function, unicode_literals import logging __version__ = '1.3.0' + + __all__ = ['bel', 'biopax', 'trips', 'reach', 'index_cards', 'sparser', + 'databases', 'literature', + 'preassembler', 'assemblers', 'mechlinker', 'belief', + 'tools', 'util'] + ''' + ############# + # For now these imports are disabled because + # (1) Every import would load everything in INDRA which is time consuming and + # (2) Optional dependencies in some modules will try to be loaded even if + # they are not intended to be used + ################## + # Core + import statements + # Input processors + from indra import bel + from indra import biopax + from indra import trips + from indra import reach + from indra import index_cards + # Clients + from indra import databases + from indra import literature + # Assemblers + from indra import preassembler + from indra import assemblers + from indra import mechlinker + from indra import belief + # Tools and utils + from indra import tools + from indra import util + ''' logging.basicConfig(format='%(levelname)s: indra/%(name)s - %(message)s', level=logging.INFO) logging.getLogger('requests').setLevel(logging.ERROR) logging.getLogger('urllib3').setLevel(logging.ERROR) logging.getLogger('rdflib').setLevel(logging.ERROR) logging.getLogger('boto3').setLevel(logging.CRITICAL) logging.getLogger('botocore').setLevel(logging.CRITICAL)
43ab1500719665b44e3b4eca4def9002711c2ee8
githublist/parser.py
githublist/parser.py
import requests import collections API_URL = 'https://api.github.com/users/{}/repos' def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u))
import requests import collections API_URL = 'https://api.github.com/users/{}/repos?per_page=100' def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u))
Update api url for recent 100 instead of default 30
Update api url for recent 100 instead of default 30
Python
mit
kshvmdn/github-list,kshvmdn/github-list,kshvmdn/github-list
import requests import collections - API_URL = 'https://api.github.com/users/{}/repos' + API_URL = 'https://api.github.com/users/{}/repos?per_page=100' def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u))
Update api url for recent 100 instead of default 30
## Code Before: import requests import collections API_URL = 'https://api.github.com/users/{}/repos' def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u)) ## Instruction: Update api url for recent 100 instead of default 30 ## Code After: import requests import collections API_URL = 'https://api.github.com/users/{}/repos?per_page=100' def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u))
import requests import collections - API_URL = 'https://api.github.com/users/{}/repos' + API_URL = 'https://api.github.com/users/{}/repos?per_page=100' ? +++++++++++++ def main(user): return parse(request(user)) def request(user): return requests.get(url=API_URL.format(user)) def parse(response): repos = response.json() data = [] if repos is None: return None for repo in repos: if 'name' in repo and not repo['fork']: data.append( collections.OrderedDict([('name', repo['name']), ('desc', repo['description']), ('lang', repo['language']), ('stars', repo['stargazers_count'])])) return data if __name__ == '__main__': import pprint u = 'kshvmdn' pprint.pprint(main(u))
948b9987afa95d7a69bd61f3d8f9fea822323b01
wagtaildraftail/draft_text.py
wagtaildraftail/draft_text.py
from __future__ import absolute_import, unicode_literals import json from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source def __html__(self): return self.exporter.render(json.loads(self.source))
from __future__ import absolute_import, unicode_literals import json from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source @cached_property def _html(self): return self.exporter.render(json.loads(self.source)) def __html__(self): return self._html def __eq__(self, other): return self.__html__() == other.__html__()
Implement equality check for DraftText nodes
Implement equality check for DraftText nodes Compare the (cached) rendered html of a node
Python
mit
gasman/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail
from __future__ import absolute_import, unicode_literals import json + + from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source + @cached_property - def __html__(self): + def _html(self): return self.exporter.render(json.loads(self.source)) + def __html__(self): + return self._html + + def __eq__(self, other): + return self.__html__() == other.__html__() +
Implement equality check for DraftText nodes
## Code Before: from __future__ import absolute_import, unicode_literals import json from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source def __html__(self): return self.exporter.render(json.loads(self.source)) ## Instruction: Implement equality check for DraftText nodes ## Code After: from __future__ import absolute_import, unicode_literals import json from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source @cached_property def _html(self): return self.exporter.render(json.loads(self.source)) def __html__(self): return self._html def __eq__(self, other): return self.__html__() == other.__html__()
from __future__ import absolute_import, unicode_literals import json + + from django.utils.functional import cached_property from draftjs_exporter.html import HTML from wagtail.wagtailcore.rich_text import RichText from wagtaildraftail.settings import get_exporter_config class DraftText(RichText): def __init__(self, value, **kwargs): super(DraftText, self).__init__(value or '{}', **kwargs) self.exporter = HTML(get_exporter_config()) def get_json(self): return self.source + @cached_property + def _html(self): + return self.exporter.render(json.loads(self.source)) + def __html__(self): - return self.exporter.render(json.loads(self.source)) + return self._html + + def __eq__(self, other): + return self.__html__() == other.__html__()
c8decb4f11059b58dd96442a4114f10cb95c7b35
tv-script-generation/helper.py
tv-script-generation/helper.py
import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb'))
import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) # Ignore notice, since we don't use it for analysing the data text = text[81:] token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb'))
Remove copyright notice during preprocessing
Remove copyright notice during preprocessing
Python
mit
danresende/deep-learning,snegirigens/DLND,0x4a50/udacity-0x4a50-deep-learning-nanodegree,kitu2007/dl_class,seinberg/deep-learning,samirma/deep-learning,schaber/deep-learning,dataewan/deep-learning,michaelgat/Udacity_DL,Bismarrck/deep-learning,greg-ashby/deep-learning-nanodegree,cranium/deep-learning,thiagoqd/queirozdias-deep-learning,mastertrojan/Udacity,arthurtsang/deep-learning,oscarmore2/deep-learning-study,rahulkgup/deep-learning-foundation,ktmud/deep-learning,saravanakumar-periyasamy/deep-learning,nwhidden/ND101-Deep-Learning,elizabetht/deep-learning,Heerozh/deep-learning,yuanotes/deep-learning,chetnapriyadarshini/deep-learning,dxl0632/deeplearning_nd_udacity,ksooklall/deep_learning_foundation,rishizek/deep-learning,tkurfurst/deep-learning,haltux/deep-learning,adrianstaniec/deep-learning,voyageth/udacity-Deep_Learning_Foundations_Nanodegree,postBG/DL_project,stubz/deep-learning,kolaogun/deep-learning,xtr33me/deep-learning,atremblay/deep-learning,ClementPhil/deep-learning,adico-somoto/deep-learning,Bismarrck/deep-learning,quietcoolwu/deep-learning,gatmeh/Udacity-deep-learning,AlphaGit/deep-learning,khalido/deep-learning,jc091/deep-learning,azhurb/deep-learning,franciscodominguezmateos/DeepLearningNanoDegree,jc091/deep-learning,dewitt-li/deep-learning,xtr33me/deep-learning,elizabetht/deep-learning,guyk1971/deep-learning,voyageth/udacity-Deep_Learning_Foundations_Nanodegree,hfoffani/deep-learning,buncem/deep-learning,samirma/deep-learning,highb/deep-learning,sisnkemp/deep-learning,ktmud/deep-learning,tatsuya-ogawa/udacity-deep-learning,chetnapriyadarshini/deep-learning,ClementPhil/deep-learning,hvillanua/deep-learning,navaro1/deep-learning,javoweb/deep-learning,stubz/deep-learning,mdiaz236/DeepLearningFoundations,abhi1509/deep-learning,VenkatRepaka/deep-learning,angelmtenor/deep-learning,Jericho/deep-learning,hdchan/deep-learning,geilerloui/deep-learning,AndysDeepAbstractions/deep-learning,vvishwa/deep-learning,chusine/dlnd,oscarmore2/deep-learning-study,sisnkemp/
deep-learning,raoyvn/deep-learning,luofan18/deep-learning,tamasjozsa/deep-learning,oscarmore2/deep-learning-study,kumi360/bike_sharing_prediction,gronnbeck/udacity-deep-learning,Agent007/deep-learning,ksooklall/deep_learning_foundation,brandoncgay/deep-learning,takahish/deep-learning,vinitsamel/udacitydeeplearning,strandbygaard/deep-learning,yuvrajsingh86/DeepLearning_Udacity,navaro1/deep-learning,scottquiring/Udacity_Deeplearning,theamazingfedex/ml-project-4,DestrinStorm/deep-learning,herruzojm/udacity-deep-learning,arturops/deep-learning,yuan-zong/deep-learning,dxl0632/deeplearning_nd_udacity,msanterre/deep_learning,greg-ashby/deep-learning-nanodegree,hvillanua/deep-learning,DataShrimp/deep-learning,DataShrimp/deep-learning,udacity/deep-learning,franciscodominguezmateos/DeepLearningNanoDegree,marko911/deep-learning,kimegitee/deep-learning,rahulkgup/deep-learning-foundation,ianhamilton117/deep-learning,JavascriptMick/deeplearning,azhurb/deep-learning,highb/deep-learning,sidazhang/udacity-dlnd,kazzz24/deep-learning,chusine/dlnd,Agent007/deep-learning,mkowoods/deep-learning,seinberg/deep-learning,nadvamir/deep-learning,thiagoqd/queirozdias-deep-learning,elenduuche/deep-learning,d-k-b/udacity-deep-learning,saravanakumar-periyasamy/deep-learning,johannesgiorgis/deep_learning,gururajl/deep-learning,raoyvn/deep-learning,strandbygaard/deep-learning,mikelseverson/Udacity-Deep_Learning-Nanodegree,rishizek/deep-learning,dewitt-li/deep-learning,llulai/deep-learning,kimegitee/deep-learning,luofan18/deep-learning,zizouvb/deeplearning,adico-somoto/deep-learning,gatmeh/Udacity-deep-learning,kvr777/deep-learning,AndysDeepAbstractions/deep-learning,heckmanc13/deep-learning,blua/deep-learning,efoley/deep-learning,kvr777/deep-learning,FiryZeplin/deep-learning,schaber/deep-learning,AndysDeepAbstractions/deep-learning,etendue/deep-learning,fnakashima/deep-learning,toddstrader/deep-learning,0x4a50/udacity-0x4a50-deep-learning-nanodegree,cranium/deep-learning,arthurtsang/deep-learning,sc
ollins83/deep-learning,michaelgat/Udacity_DL,ysmazda/deep-learning,kumi360/bike_sharing_prediction,VenkatRepaka/deep-learning,guyk1971/deep-learning,tatsuya-ogawa/udacity-deep-learning,mikelseverson/Udacity-Deep_Learning-Nanodegree,dataewan/deep-learning,elenduuche/deep-learning,abhi1509/deep-learning,Riptawr/deep-learning,JasonNK/udacity-dlnd,josealber84/deep-learning,haltux/deep-learning,SlipknotTN/udacity-deeplearning-nanodegree,gronnbeck/udacity-deep-learning,retnuh/deep-learning,brandoncgay/deep-learning,angelmtenor/deep-learning,mdiaz236/DeepLearningFoundations,lukechen526/deep-learning,marko911/deep-learning,scottquiring/Udacity_Deeplearning,nwhidden/ND101-Deep-Learning,godfreyduke/deep-learning,yuan-zong/deep-learning,etendue/deep-learning,FishingOnATree/deep-learning,JasonNK/udacity-dlnd,AlphaGit/deep-learning,postBG/DL_project,rally12/deep-learning,throx66/deep-learning,godfreyduke/deep-learning,vinitsamel/udacitydeeplearning,Heerozh/deep-learning,rally12/deep-learning,arturops/deep-learning,harper/dlnd_thirdproject,georgebastille/deep-learning,flaviocordova/udacity_deep_learn_project,kiritisai/deep-learning-udacity,throx66/deep-learning,llulai/deep-learning,josealber84/deep-learning,herruzojm/udacity-deep-learning,sidazhang/udacity-dlnd,tamasjozsa/deep-learning,SlipknotTN/udacity-deeplearning-nanodegree,lukechen526/deep-learning,adrianstaniec/deep-learning,hfoffani/deep-learning,harper/dlnd_thirdproject,tkurfurst/deep-learning,jt6211/deep-learning,d-k-b/udacity-deep-learning,jc091/deep-learning,kazzz24/deep-learning,javoweb/deep-learning,retnuh/deep-learning,johannesgiorgis/deep_learning,ksooklall/deep_learning_foundation,theamazingfedex/ml-project-4,atremblay/deep-learning,zhuanxuhit/deep-learning,schaber/deep-learning,Riptawr/deep-learning,takahish/deep-learning
import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) + + # Ignore notice, since we don't use it for analysing the data + text = text[81:] token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb'))
Remove copyright notice during preprocessing
## Code Before: import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb')) ## Instruction: Remove copyright notice during preprocessing ## Code After: import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) # Ignore notice, since we don't use it for analysing the data text = text[81:] token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return 
pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb'))
import os import pickle def load_data(path): """ Load Dataset from File """ input_file = os.path.join(path) with open(input_file, "r") as f: data = f.read() return data def preprocess_and_save_data(dataset_path, token_lookup, create_lookup_tables): """ Preprocess Text Data """ text = load_data(dataset_path) + + # Ignore notice, since we don't use it for analysing the data + text = text[81:] token_dict = token_lookup() for key, token in token_dict.items(): text = text.replace(key, ' {} '.format(token)) text = text.lower() text = text.split() vocab_to_int, int_to_vocab = create_lookup_tables(text) int_text = [vocab_to_int[word] for word in text] pickle.dump((int_text, vocab_to_int, int_to_vocab, token_dict), open('preprocess.p', 'wb')) def load_preprocess(): """ Load the Preprocessed Training data and return them in batches of <batch_size> or less """ return pickle.load(open('preprocess.p', mode='rb')) def save_params(params): """ Save parameters to file """ pickle.dump(params, open('params.p', 'wb')) def load_params(): """ Load parameters from file """ return pickle.load(open('params.p', mode='rb'))
efb98ffae0a92d9a0facc76cd43bb51dca3b2820
nibble_aes/find_dist/find_ids.py
nibble_aes/find_dist/find_ids.py
import ast import sys def parse(line): return ast.literal_eval(line) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) forward_diffs = [] with open(sys.argv[1]) as f: for i, forward_rounds, xss in map(parse, f): forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss]) backward_diffs = [] with open(sys.argv[2]) as g: for i, backward_rounds, yss in map(parse, g): backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss]) # truncate first round of backward differential # by comparing last round of forward differential and second last # round of backward differential ids = [] for i, forward_rounds, xss in forward_diffs: for j, backward_rounds, yss in backward_diffs: if xss[-1].isdisjoint(yss[-2]): backward_rounds -= 1 print((i, forward_rounds, backward_rounds, j)) if __name__ == "__main__": main()
import ast import sys def parse(line): i, rounds, xss = ast.literal_eval(line) yss = [set(xs) for xs in xss] return (i, rounds, yss) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) ids = [] with open(sys.argv[1]) as f: for i, forward_rounds, xss in map(parse, f): if forward_rounds < 2: continue with open(sys.argv[2]) as g: for j, backward_rounds, yss in map(parse, g): if backward_rounds < 2: continue # truncate first round of backward differential # by comparing last round of forward differential and second last # round of backward differential if xss[-1].isdisjoint(yss[-2]): backward_rounds -= 1 print((i, forward_rounds, backward_rounds, j)) if __name__ == "__main__": main()
Revert "Trade memory for time."
Revert "Trade memory for time." This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62.
Python
mit
wei2912/aes-idc,wei2912/idc,wei2912/idc,wei2912/idc,wei2912/idc,wei2912/aes-idc
import ast import sys def parse(line): - return ast.literal_eval(line) + i, rounds, xss = ast.literal_eval(line) + yss = [set(xs) for xs in xss] + return (i, rounds, yss) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) - forward_diffs = [] + ids = [] with open(sys.argv[1]) as f: - for i, forward_rounds, xss in map(parse, f): + for i, forward_rounds, xss in map(parse, f): - forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss]) + if forward_rounds < 2: + continue - backward_diffs = [] - with open(sys.argv[2]) as g: + with open(sys.argv[2]) as g: - for i, backward_rounds, yss in map(parse, g): + for j, backward_rounds, yss in map(parse, g): - backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss]) + if backward_rounds < 2: + continue - # truncate first round of backward differential + # truncate first round of backward differential - # by comparing last round of forward differential and second last + # by comparing last round of forward differential and second last - # round of backward differential + # round of backward differential - ids = [] - for i, forward_rounds, xss in forward_diffs: - for j, backward_rounds, yss in backward_diffs: - if xss[-1].isdisjoint(yss[-2]): + if xss[-1].isdisjoint(yss[-2]): - backward_rounds -= 1 + backward_rounds -= 1 - print((i, forward_rounds, backward_rounds, j)) + print((i, forward_rounds, backward_rounds, j)) if __name__ == "__main__": main()
Revert "Trade memory for time."
## Code Before: import ast import sys def parse(line): return ast.literal_eval(line) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) forward_diffs = [] with open(sys.argv[1]) as f: for i, forward_rounds, xss in map(parse, f): forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss]) backward_diffs = [] with open(sys.argv[2]) as g: for i, backward_rounds, yss in map(parse, g): backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss]) # truncate first round of backward differential # by comparing last round of forward differential and second last # round of backward differential ids = [] for i, forward_rounds, xss in forward_diffs: for j, backward_rounds, yss in backward_diffs: if xss[-1].isdisjoint(yss[-2]): backward_rounds -= 1 print((i, forward_rounds, backward_rounds, j)) if __name__ == "__main__": main() ## Instruction: Revert "Trade memory for time." ## Code After: import ast import sys def parse(line): i, rounds, xss = ast.literal_eval(line) yss = [set(xs) for xs in xss] return (i, rounds, yss) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) ids = [] with open(sys.argv[1]) as f: for i, forward_rounds, xss in map(parse, f): if forward_rounds < 2: continue with open(sys.argv[2]) as g: for j, backward_rounds, yss in map(parse, g): if backward_rounds < 2: continue # truncate first round of backward differential # by comparing last round of forward differential and second last # round of backward differential if xss[-1].isdisjoint(yss[-2]): backward_rounds -= 1 print((i, forward_rounds, backward_rounds, j)) if __name__ == "__main__": main()
import ast import sys def parse(line): - return ast.literal_eval(line) ? ^^ - + i, rounds, xss = ast.literal_eval(line) ? +++ ^ +++++++++ + yss = [set(xs) for xs in xss] + return (i, rounds, yss) def main(): if len(sys.argv) != 3: print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr) sys.exit(1) - forward_diffs = [] + ids = [] with open(sys.argv[1]) as f: - for i, forward_rounds, xss in map(parse, f): + for i, forward_rounds, xss in map(parse, f): ? + - forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss]) + if forward_rounds < 2: + continue - backward_diffs = [] - with open(sys.argv[2]) as g: + with open(sys.argv[2]) as g: ? ++++++++ - for i, backward_rounds, yss in map(parse, g): ? ^ + for j, backward_rounds, yss in map(parse, g): ? ++++++++ ^ - backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss]) + if backward_rounds < 2: + continue - # truncate first round of backward differential + # truncate first round of backward differential ? ++++++++++++++++ - # by comparing last round of forward differential and second last + # by comparing last round of forward differential and second last ? ++++++++++++++++ - # round of backward differential + # round of backward differential ? ++++++++++++++++ - ids = [] - for i, forward_rounds, xss in forward_diffs: - for j, backward_rounds, yss in backward_diffs: - if xss[-1].isdisjoint(yss[-2]): + if xss[-1].isdisjoint(yss[-2]): ? ++++++++ - backward_rounds -= 1 + backward_rounds -= 1 ? ++++++++ - print((i, forward_rounds, backward_rounds, j)) + print((i, forward_rounds, backward_rounds, j)) ? ++++++++ if __name__ == "__main__": main()
ae629597067817457db9e86121dde7f6ee3a2b7d
stagecraft/libs/request_logger/middleware.py
stagecraft/libs/request_logger/middleware.py
from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): self.request_time = time.time() logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): elapsed_time = time.time() - self.request_time logger.info("{method} {path} : {status} {secs:.6f}s".format( method=request.method, path=request.get_full_path(), status=response.status_code, secs=elapsed_time), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'status': response.status_code, 'request_time': elapsed_time, }) return response
from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): request.start_request_time = time.time() logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): if hasattr(request, 'start_request_time'): elapsed_time = time.time() - request.start_request_time logger.info("{method} {path} : {status} {secs:.6f}s".format( method=request.method, path=request.get_full_path(), status=response.status_code, secs=elapsed_time), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'status': response.status_code, 'request_time': elapsed_time, }) return response
Fix thread-safety issue in stagecraft
Fix thread-safety issue in stagecraft Django middleware is not thread-safe. We should store this context on the request object instance. Django always calls `process_response`, but it is possible that `process_request` has been skipped. So we have a guard checking that it’s safe to log the response time. See https://docs.djangoproject.com/en/1.7/topics/http/middleware/#process_request
Python
mit
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): - self.request_time = time.time() + request.start_request_time = time.time() logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): + if hasattr(request, 'start_request_time'): - elapsed_time = time.time() - self.request_time + elapsed_time = time.time() - request.start_request_time - logger.info("{method} {path} : {status} {secs:.6f}s".format( + logger.info("{method} {path} : {status} {secs:.6f}s".format( - method=request.method, + method=request.method, - path=request.get_full_path(), + path=request.get_full_path(), - status=response.status_code, + status=response.status_code, - secs=elapsed_time), + secs=elapsed_time), - extra={ + extra={ - 'request_method': request.method, + 'request_method': request.method, - 'http_host': request.META.get('HTTP_HOST'), + 'http_host': request.META.get('HTTP_HOST'), - 'http_path': request.get_full_path(), + 'http_path': request.get_full_path(), - 'status': response.status_code, + 'status': response.status_code, - 'request_time': elapsed_time, + 'request_time': elapsed_time, - }) + }) return response
Fix thread-safety issue in stagecraft
## Code Before: from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): self.request_time = time.time() logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): elapsed_time = time.time() - self.request_time logger.info("{method} {path} : {status} {secs:.6f}s".format( method=request.method, path=request.get_full_path(), status=response.status_code, secs=elapsed_time), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'status': response.status_code, 'request_time': elapsed_time, }) return response ## Instruction: Fix thread-safety issue in stagecraft ## Code After: from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): request.start_request_time = time.time() logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): if hasattr(request, 'start_request_time'): elapsed_time = time.time() - request.start_request_time logger.info("{method} {path} : {status} {secs:.6f}s".format( method=request.method, path=request.get_full_path(), status=response.status_code, secs=elapsed_time), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'status': response.status_code, 'request_time': 
elapsed_time, }) return response
from __future__ import unicode_literals import logging import time logger = logging.getLogger(__name__) class RequestLoggerMiddleware(object): def process_request(self, request): - self.request_time = time.time() ? ^^^ + request.start_request_time = time.time() ? +++++ ^ ++++++ logger.info("{method} {path}".format( method=request.method, path=request.get_full_path()), extra={ 'request_method': request.method, 'http_host': request.META.get('HTTP_HOST'), 'http_path': request.get_full_path(), 'request_id': request.META.get('HTTP_REQUEST_ID') }) def process_response(self, request, response): + if hasattr(request, 'start_request_time'): - elapsed_time = time.time() - self.request_time ? ^^^ + elapsed_time = time.time() - request.start_request_time ? ++++ +++++ ^ ++++++ - logger.info("{method} {path} : {status} {secs:.6f}s".format( + logger.info("{method} {path} : {status} {secs:.6f}s".format( ? ++++ - method=request.method, + method=request.method, ? ++++ - path=request.get_full_path(), + path=request.get_full_path(), ? ++++ - status=response.status_code, + status=response.status_code, ? ++++ - secs=elapsed_time), + secs=elapsed_time), ? ++++ - extra={ + extra={ ? ++++ - 'request_method': request.method, + 'request_method': request.method, ? ++++ - 'http_host': request.META.get('HTTP_HOST'), + 'http_host': request.META.get('HTTP_HOST'), ? ++++ - 'http_path': request.get_full_path(), + 'http_path': request.get_full_path(), ? ++++ - 'status': response.status_code, + 'status': response.status_code, ? ++++ - 'request_time': elapsed_time, + 'request_time': elapsed_time, ? ++++ - }) + }) ? ++++ return response
c1e5822f07e2fe4ca47633ed3dfda7d7bee64b6c
nvchecker/source/aiohttp_httpclient.py
nvchecker/source/aiohttp_httpclient.py
import atexit import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5) atexit.register(session.close)
import atexit import asyncio import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res session = BetterClientSession(connector=connector) @atexit.register def cleanup(): loop = asyncio.get_event_loop() loop.run_until_complete(session.close())
Handle graceful exit and timeout
Handle graceful exit and timeout Timeout was refactored and the defaults work correctly here.
Python
mit
lilydjwg/nvchecker
import atexit + import asyncio import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res - session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5) + session = BetterClientSession(connector=connector) - atexit.register(session.close) + @atexit.register + def cleanup(): + loop = asyncio.get_event_loop() + loop.run_until_complete(session.close()) +
Handle graceful exit and timeout
## Code Before: import atexit import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5) atexit.register(session.close) ## Instruction: Handle graceful exit and timeout ## Code After: import atexit import asyncio import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res session = BetterClientSession(connector=connector) @atexit.register def cleanup(): loop = asyncio.get_event_loop() loop.run_until_complete(session.close())
import atexit + import asyncio import aiohttp connector = aiohttp.TCPConnector(limit=20) __all__ = ['session', 'HTTPError'] class HTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class BetterClientSession(aiohttp.ClientSession): async def _request(self, *args, **kwargs): if hasattr(self, "nv_config") and self.nv_config.get("proxy"): kwargs.setdefault("proxy", self.nv_config.get("proxy")) res = await super(BetterClientSession, self)._request( *args, **kwargs) if res.status >= 400: raise HTTPError(res.status, res.reason, res) return res - session = BetterClientSession(connector=connector, read_timeout=10, conn_timeout=5) ? --------------------------------- + session = BetterClientSession(connector=connector) - atexit.register(session.close) + + @atexit.register + def cleanup(): + loop = asyncio.get_event_loop() + loop.run_until_complete(session.close())
9dee48fb0964b12780f57cef26c5b84072448232
ds/api/serializer/app.py
ds/api/serializer/app.py
from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, }
from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, 'provider': item.provider, 'provider_config': item.provider_config, }
Add provider information to App
Add provider information to App
Python
apache-2.0
jkimbo/freight,rshk/freight,jkimbo/freight,getsentry/freight,jkimbo/freight,rshk/freight,klynton/freight,rshk/freight,getsentry/freight,klynton/freight,getsentry/freight,rshk/freight,klynton/freight,getsentry/freight,getsentry/freight,jkimbo/freight,klynton/freight
from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, + 'provider': item.provider, + 'provider_config': item.provider_config, }
Add provider information to App
## Code Before: from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, } ## Instruction: Add provider information to App ## Code After: from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, 'provider': item.provider, 'provider_config': item.provider_config, }
from __future__ import absolute_import from ds.models import App from .base import Serializer from .manager import add @add(App) class AppSerializer(Serializer): def serialize(self, item, attrs): return { 'id': str(item.id), 'name': item.name, + 'provider': item.provider, + 'provider_config': item.provider_config, }
b60fbc21271a7efa09d256debb17f583ec83fdf2
MMCorePy_wrap/setup.py
MMCorePy_wrap/setup.py
from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', '../MMDevice/Property.cpp', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp'], language = "c++", extra_objects = [], include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup (name = 'MMCorePy', version = '0.1', author = "Micro-Manager", description = "Micro-Manager Core Python wrapper", ext_modules = [mmcorepy_module], py_modules = ["MMCorePy"], )
from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/Host.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', ], language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup(name='MMCorePy', version='0.1', author="Micro-Manager", description="Micro-Manager Core Python wrapper", ext_modules=[mmcorepy_module], py_modules=["MMCorePy"], )
Add MMCore/Host.cpp to Unix build.
MMCorePy: Add MMCore/Host.cpp to Unix build. Was missing. Note that build is still broken (even though it does not explicitly fail), at least on Mac OS X, because of missing libraries (IOKit, CoreFoundation, and boost.system, I think). Also removed MMDevice/Property.cpp, which is not needed here. git-svn-id: 03a8048b5ee8463be5048a3801110fb50f378627@11992 d0ab736e-dc22-4aeb-8dc9-08def0aa14fd
Python
mit
kmdouglass/Micro-Manager,kmdouglass/Micro-Manager
from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', - sources=['MMCorePy_wrap.cxx', + sources=['MMCorePy_wrap.cxx', - '../MMDevice/DeviceUtils.cpp', - '../MMDevice/ImgBuffer.cpp', - '../MMDevice/Property.cpp', - '../MMCore/CircularBuffer.cpp', - '../MMCore/Configuration.cpp', - '../MMCore/CoreCallback.cpp', - '../MMCore/CoreProperty.cpp', - '../MMCore/FastLogger.cpp', - '../MMCore/MMCore.cpp', - '../MMCore/PluginManager.cpp'], - language = "c++", - extra_objects = [], + '../MMCore/CircularBuffer.cpp', + '../MMCore/Configuration.cpp', + '../MMCore/CoreCallback.cpp', + '../MMCore/CoreProperty.cpp', + '../MMCore/FastLogger.cpp', + '../MMCore/Host.cpp', + '../MMCore/MMCore.cpp', + '../MMCore/PluginManager.cpp', + '../MMDevice/DeviceUtils.cpp', + '../MMDevice/ImgBuffer.cpp', + ], + language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) - setup (name = 'MMCorePy', + setup(name='MMCorePy', - version = '0.1', + version='0.1', - author = "Micro-Manager", + author="Micro-Manager", - description = "Micro-Manager Core Python wrapper", + description="Micro-Manager Core Python wrapper", - ext_modules = [mmcorepy_module], + ext_modules=[mmcorepy_module], - py_modules = ["MMCorePy"], + py_modules=["MMCorePy"], - ) + ) -
Add MMCore/Host.cpp to Unix build.
## Code Before: from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', '../MMDevice/Property.cpp', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp'], language = "c++", extra_objects = [], include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup (name = 'MMCorePy', version = '0.1', author = "Micro-Manager", description = "Micro-Manager Core Python wrapper", ext_modules = [mmcorepy_module], py_modules = ["MMCorePy"], ) ## Instruction: Add MMCore/Host.cpp to Unix build. ## Code After: from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', sources=['MMCorePy_wrap.cxx', '../MMCore/CircularBuffer.cpp', '../MMCore/Configuration.cpp', '../MMCore/CoreCallback.cpp', '../MMCore/CoreProperty.cpp', '../MMCore/FastLogger.cpp', '../MMCore/Host.cpp', '../MMCore/MMCore.cpp', '../MMCore/PluginManager.cpp', '../MMDevice/DeviceUtils.cpp', '../MMDevice/ImgBuffer.cpp', ], language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) setup(name='MMCorePy', version='0.1', author="Micro-Manager", description="Micro-Manager Core Python wrapper", ext_modules=[mmcorepy_module], py_modules=["MMCorePy"], )
from distutils.core import setup, Extension import numpy.distutils.misc_util import os os.environ['CC'] = 'g++' #os.environ['CXX'] = 'g++' #os.environ['CPP'] = 'g++' #os.environ['LDSHARED'] = 'g++' mmcorepy_module = Extension('_MMCorePy', - sources=['MMCorePy_wrap.cxx', + sources=['MMCorePy_wrap.cxx', ? + - '../MMDevice/DeviceUtils.cpp', - '../MMDevice/ImgBuffer.cpp', - '../MMDevice/Property.cpp', - '../MMCore/CircularBuffer.cpp', - '../MMCore/Configuration.cpp', - '../MMCore/CoreCallback.cpp', - '../MMCore/CoreProperty.cpp', - '../MMCore/FastLogger.cpp', - '../MMCore/MMCore.cpp', - '../MMCore/PluginManager.cpp'], - language = "c++", - extra_objects = [], + '../MMCore/CircularBuffer.cpp', + '../MMCore/Configuration.cpp', + '../MMCore/CoreCallback.cpp', + '../MMCore/CoreProperty.cpp', + '../MMCore/FastLogger.cpp', + '../MMCore/Host.cpp', + '../MMCore/MMCore.cpp', + '../MMCore/PluginManager.cpp', + '../MMDevice/DeviceUtils.cpp', + '../MMDevice/ImgBuffer.cpp', + ], + language="c++", include_dirs=numpy.distutils.misc_util.get_numpy_include_dirs(), ) - setup (name = 'MMCorePy', ? - - - + setup(name='MMCorePy', - version = '0.1', ? - - - + version='0.1', - author = "Micro-Manager", ? - ------ - + author="Micro-Manager", - description = "Micro-Manager Core Python wrapper", ? - - - + description="Micro-Manager Core Python wrapper", - ext_modules = [mmcorepy_module], ? - - - + ext_modules=[mmcorepy_module], - py_modules = ["MMCorePy"], ? - - - + py_modules=["MMCorePy"], - ) ? -- + ) -
6b3dd31b1a795a92a00c7dba636a88636018655c
tests/blueprints/admin/conftest.py
tests/blueprints/admin/conftest.py
import pytest from tests.base import create_admin_app from tests.conftest import database_recreated @pytest.fixture(scope='session') def admin_app_without_db(db): app = create_admin_app() with app.app_context(): yield app @pytest.fixture(scope='module') def app(admin_app_without_db, db): app = admin_app_without_db with database_recreated(db): yield app
import pytest from tests.conftest import database_recreated @pytest.fixture(scope='module') def app(admin_app, db): app = admin_app with app.app_context(): with database_recreated(db): yield app
Use existing `admin_app` fixture for admin tests
Use existing `admin_app` fixture for admin tests
Python
bsd-3-clause
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
import pytest - from tests.base import create_admin_app from tests.conftest import database_recreated - @pytest.fixture(scope='session') + @pytest.fixture(scope='module') - def admin_app_without_db(db): + def app(admin_app, db): - app = create_admin_app() + app = admin_app with app.app_context(): + with database_recreated(db): - yield app + yield app - - @pytest.fixture(scope='module') - def app(admin_app_without_db, db): - app = admin_app_without_db - with database_recreated(db): - yield app -
Use existing `admin_app` fixture for admin tests
## Code Before: import pytest from tests.base import create_admin_app from tests.conftest import database_recreated @pytest.fixture(scope='session') def admin_app_without_db(db): app = create_admin_app() with app.app_context(): yield app @pytest.fixture(scope='module') def app(admin_app_without_db, db): app = admin_app_without_db with database_recreated(db): yield app ## Instruction: Use existing `admin_app` fixture for admin tests ## Code After: import pytest from tests.conftest import database_recreated @pytest.fixture(scope='module') def app(admin_app, db): app = admin_app with app.app_context(): with database_recreated(db): yield app
import pytest - from tests.base import create_admin_app from tests.conftest import database_recreated - @pytest.fixture(scope='session') ? ^ ----- + @pytest.fixture(scope='module') ? ^^^^^ - def admin_app_without_db(db): + def app(admin_app, db): - app = create_admin_app() ? ------- -- + app = admin_app with app.app_context(): - yield app - - - @pytest.fixture(scope='module') - def app(admin_app_without_db, db): - app = admin_app_without_db - with database_recreated(db): + with database_recreated(db): ? ++++ - yield app + yield app ? ++++
3e02a38a9ae52603f620a7969ce532b61de531d7
libgreader/__init__.py
libgreader/__init__.py
__author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" from .googlereader import GoogleReader from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method from .items import * from .url import ReaderUrl
__author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" try: import requests except ImportError: # Will occur during setup.py install pass else: from .googlereader import GoogleReader from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method from .items import * from .url import ReaderUrl
Fix import error during setup.py install
Fix import error during setup.py install
Python
mit
smurfix/librssreader,askedrelic/libgreader
__author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" + try: + import requests + except ImportError: + # Will occur during setup.py install + pass + else: - from .googlereader import GoogleReader + from .googlereader import GoogleReader - from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method + from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method - from .items import * + from .items import * - from .url import ReaderUrl + from .url import ReaderUrl
Fix import error during setup.py install
## Code Before: __author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" from .googlereader import GoogleReader from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method from .items import * from .url import ReaderUrl ## Instruction: Fix import error during setup.py install ## Code After: __author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" try: import requests except ImportError: # Will occur during setup.py install pass else: from .googlereader import GoogleReader from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method from .items import * from .url import ReaderUrl
__author__ = "Matt Behrens <[email protected]>" __version__ = "0.8.0" __copyright__ = "Copyright (C) 2012 Matt Behrens" + try: + import requests + except ImportError: + # Will occur during setup.py install + pass + else: - from .googlereader import GoogleReader + from .googlereader import GoogleReader ? ++++ - from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method + from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method ? ++++ - from .items import * + from .items import * ? ++++ - from .url import ReaderUrl + from .url import ReaderUrl ? ++++
b3a9a4a1e451815f15dc35c9b6ec9f7b67387260
scipy/misc/tests/test_common.py
scipy/misc/tests/test_common.py
from __future__ import division, print_function, absolute_import import pytest from numpy.testing import assert_equal, assert_allclose from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): # Test shape and dtype of signal ecg = electrocardiogram() assert_equal(ecg.shape, (108000,)) assert ecg.dtype == float
from __future__ import division, print_function, absolute_import import pytest from numpy.testing import assert_equal, assert_allclose, assert_almost_equal from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): # Test shape, dtype and stats of signal ecg = electrocardiogram() assert ecg.dtype == float assert_equal(ecg.shape, (108000,)) assert_almost_equal(ecg.mean(), -0.16510875) assert_almost_equal(ecg.std(), 0.5992473991177294)
Check mean and STD of returned ECG signal
TST: Check mean and STD of returned ECG signal
Python
bsd-3-clause
Eric89GXL/scipy,jor-/scipy,andyfaff/scipy,ilayn/scipy,andyfaff/scipy,tylerjereddy/scipy,grlee77/scipy,aarchiba/scipy,arokem/scipy,lhilt/scipy,perimosocordiae/scipy,mdhaber/scipy,rgommers/scipy,matthew-brett/scipy,aeklant/scipy,endolith/scipy,rgommers/scipy,person142/scipy,ilayn/scipy,WarrenWeckesser/scipy,matthew-brett/scipy,scipy/scipy,nmayorov/scipy,aeklant/scipy,andyfaff/scipy,endolith/scipy,Stefan-Endres/scipy,aarchiba/scipy,person142/scipy,aarchiba/scipy,ilayn/scipy,Eric89GXL/scipy,person142/scipy,pizzathief/scipy,grlee77/scipy,vigna/scipy,ilayn/scipy,anntzer/scipy,scipy/scipy,pizzathief/scipy,jamestwebber/scipy,arokem/scipy,arokem/scipy,vigna/scipy,jor-/scipy,mdhaber/scipy,gertingold/scipy,ilayn/scipy,Stefan-Endres/scipy,andyfaff/scipy,gfyoung/scipy,rgommers/scipy,lhilt/scipy,andyfaff/scipy,e-q/scipy,perimosocordiae/scipy,pizzathief/scipy,tylerjereddy/scipy,endolith/scipy,WarrenWeckesser/scipy,gfyoung/scipy,person142/scipy,Eric89GXL/scipy,endolith/scipy,matthew-brett/scipy,grlee77/scipy,Stefan-Endres/scipy,aarchiba/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,endolith/scipy,gertingold/scipy,perimosocordiae/scipy,e-q/scipy,scipy/scipy,vigna/scipy,gertingold/scipy,gfyoung/scipy,e-q/scipy,jor-/scipy,matthew-brett/scipy,perimosocordiae/scipy,aeklant/scipy,mdhaber/scipy,WarrenWeckesser/scipy,scipy/scipy,jamestwebber/scipy,anntzer/scipy,vigna/scipy,Eric89GXL/scipy,scipy/scipy,nmayorov/scipy,Stefan-Endres/scipy,gfyoung/scipy,lhilt/scipy,tylerjereddy/scipy,Eric89GXL/scipy,Eric89GXL/scipy,anntzer/scipy,nmayorov/scipy,WarrenWeckesser/scipy,WarrenWeckesser/scipy,pizzathief/scipy,gfyoung/scipy,zerothi/scipy,scipy/scipy,e-q/scipy,matthew-brett/scipy,aarchiba/scipy,pizzathief/scipy,tylerjereddy/scipy,aeklant/scipy,aeklant/scipy,jor-/scipy,perimosocordiae/scipy,vigna/scipy,e-q/scipy,arokem/scipy,anntzer/scipy,ilayn/scipy,jor-/scipy,zerothi/scipy,mdhaber/scipy,perimosocordiae/scipy,gertingold/scipy,Stefan-Endres/scipy,nmayorov/scipy,jamestwebber/scipy,anntzer/scipy,endolit
h/scipy,zerothi/scipy,nmayorov/scipy,person142/scipy,andyfaff/scipy,anntzer/scipy,rgommers/scipy,grlee77/scipy,rgommers/scipy,zerothi/scipy,zerothi/scipy,jamestwebber/scipy,arokem/scipy,tylerjereddy/scipy,jamestwebber/scipy,gertingold/scipy,mdhaber/scipy,zerothi/scipy,lhilt/scipy,lhilt/scipy,grlee77/scipy,mdhaber/scipy
from __future__ import division, print_function, absolute_import import pytest - from numpy.testing import assert_equal, assert_allclose + from numpy.testing import assert_equal, assert_allclose, assert_almost_equal from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): - # Test shape and dtype of signal + # Test shape, dtype and stats of signal ecg = electrocardiogram() + assert ecg.dtype == float assert_equal(ecg.shape, (108000,)) - assert ecg.dtype == float + assert_almost_equal(ecg.mean(), -0.16510875) + assert_almost_equal(ecg.std(), 0.5992473991177294)
Check mean and STD of returned ECG signal
## Code Before: from __future__ import division, print_function, absolute_import import pytest from numpy.testing import assert_equal, assert_allclose from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): # Test shape and dtype of signal ecg = electrocardiogram() assert_equal(ecg.shape, (108000,)) assert ecg.dtype == float ## Instruction: Check mean and STD of returned ECG signal ## Code After: from __future__ import division, print_function, absolute_import import pytest from numpy.testing import assert_equal, assert_allclose, assert_almost_equal from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): # 
Test shape, dtype and stats of signal ecg = electrocardiogram() assert ecg.dtype == float assert_equal(ecg.shape, (108000,)) assert_almost_equal(ecg.mean(), -0.16510875) assert_almost_equal(ecg.std(), 0.5992473991177294)
from __future__ import division, print_function, absolute_import import pytest - from numpy.testing import assert_equal, assert_allclose + from numpy.testing import assert_equal, assert_allclose, assert_almost_equal ? +++++++++++++++++++++ from scipy._lib._numpy_compat import suppress_warnings from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram from scipy.special import logsumexp as sc_logsumexp def test_logsumexp(): # make sure logsumexp can be imported from either scipy.misc or # scipy.special with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`logsumexp` is deprecated") assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16) def test_pade(): # make sure scipy.misc.pade exists with suppress_warnings() as sup: sup.filter(DeprecationWarning, "`pade` is deprecated") pade([1, 2], 1) def test_face(): assert_equal(face().shape, (768, 1024, 3)) def test_ascent(): assert_equal(ascent().shape, (512, 512)) def test_electrocardiogram(): - # Test shape and dtype of signal ? ^^^^ + # Test shape, dtype and stats of signal ? ^ ++++++++++ ecg = electrocardiogram() + assert ecg.dtype == float assert_equal(ecg.shape, (108000,)) - assert ecg.dtype == float + assert_almost_equal(ecg.mean(), -0.16510875) + assert_almost_equal(ecg.std(), 0.5992473991177294)
bc0895f318a9297144e31da3647d6fc5716aafc4
setup.py
setup.py
''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') shutil.rmtree('quic/py_quic') shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic()
''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') if os.path.exists('quic/py_quic'): shutil.rmtree('quic/py_quic') shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic()
Make sure this works the first time you run it
Make sure this works the first time you run it
Python
mit
skggm/skggm,skggm/skggm
''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') + if os.path.exists('quic/py_quic'): - shutil.rmtree('quic/py_quic') + shutil.rmtree('quic/py_quic') + shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic()
Make sure this works the first time you run it
## Code Before: ''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') shutil.rmtree('quic/py_quic') shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic() ## Instruction: Make sure this works the first time you run it ## Code After: ''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') if os.path.exists('quic/py_quic'): shutil.rmtree('quic/py_quic') shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic()
''' Setup script that: /pyquic: - compiles pyquic - copies py_quic into base directory so that we can use the module directly ''' import os import shutil class temp_cd(): def __init__(self, temp_dir): self._temp_dir = temp_dir self._return_dir = os.path.dirname(os.path.realpath(__file__)) def __enter__(self): os.chdir(self._temp_dir) def __exit__(self, type, value, traceback): os.chdir(self._return_dir) def setup_pyquic(): with temp_cd('pyquic/py_quic'): os.system('make') + if os.path.exists('quic/py_quic'): - shutil.rmtree('quic/py_quic') + shutil.rmtree('quic/py_quic') ? ++++ + shutil.copytree('pyquic/py_quic', 'quic/py_quic') def clean_pyquic(): shutil.rmtree('py_quic') os.system('git submodule update --checkout --remote -f') if __name__ == "__main__": setup_pyquic()
a6d05f3c1a33381a07d459c1fdff93bc4ba30594
pidman/pid/migrations/0002_pid_sequence_initial_value.py
pidman/pid/migrations/0002_pid_sequence_initial_value.py
from __future__ import unicode_literals from django.db import migrations, models from pidman.pid.noid import decode_noid from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): max_noid = Pid.objects.all() \ .aggregate(models.Max('pid')).values()[0] last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ]
from __future__ import unicode_literals from django.db import migrations, models from pidman.pid.noid import decode_noid, encode_noid from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): # pid noids are generated in sequence, so the pid with the # highest pk _should_ be the one with the highest noid max_noid = Pid.objects.all().order_by('pk').last().pid # (previously using aggregate max, but doesn't seem to find # the highest pid value correctly) last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ]
Fix max noid detection when setting pid sequence
Fix max noid detection when setting pid sequence
Python
apache-2.0
emory-libraries/pidman,emory-libraries/pidman
from __future__ import unicode_literals from django.db import migrations, models - from pidman.pid.noid import decode_noid + from pidman.pid.noid import decode_noid, encode_noid from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): - max_noid = Pid.objects.all() \ - .aggregate(models.Max('pid')).values()[0] + # pid noids are generated in sequence, so the pid with the + # highest pk _should_ be the one with the highest noid + max_noid = Pid.objects.all().order_by('pk').last().pid + # (previously using aggregate max, but doesn't seem to find + # the highest pid value correctly) last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ]
Fix max noid detection when setting pid sequence
## Code Before: from __future__ import unicode_literals from django.db import migrations, models from pidman.pid.noid import decode_noid from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): max_noid = Pid.objects.all() \ .aggregate(models.Max('pid')).values()[0] last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ] ## Instruction: Fix max noid detection when setting pid sequence ## Code After: from __future__ import unicode_literals from django.db import migrations, models from pidman.pid.noid import decode_noid, encode_noid from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): # pid noids are generated in sequence, so the pid with the # highest pk _should_ be the one with the highest noid max_noid = Pid.objects.all().order_by('pk').last().pid # (previously using aggregate max, but doesn't seem to find # the highest pid value correctly) last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) 
pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ]
from __future__ import unicode_literals from django.db import migrations, models - from pidman.pid.noid import decode_noid + from pidman.pid.noid import decode_noid, encode_noid ? +++++++++++++ from pidman.pid import models as pid_models def pid_sequence_lastvalue(apps, schema_editor): # if the database has existing pids, update the sequence last value # so it will start minting pids starting after the current set Pid = apps.get_model("pid", "Pid") Sequence = apps.get_model("sequences", "Sequence") if Pid.objects.count(): - max_noid = Pid.objects.all() \ - .aggregate(models.Max('pid')).values()[0] + # pid noids are generated in sequence, so the pid with the + # highest pk _should_ be the one with the highest noid + max_noid = Pid.objects.all().order_by('pk').last().pid + # (previously using aggregate max, but doesn't seem to find + # the highest pid value correctly) last_val = decode_noid(max_noid) pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME, last=last_val) pid_seq.save() def remove_pid_sequence(apps, schema_editor): Sequence = apps.get_model("sequences", "Sequence") Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete() class Migration(migrations.Migration): dependencies = [ ('pid', '0001_initial'), ('sequences', '0001_initial'), ] operations = [ migrations.RunPython(pid_sequence_lastvalue, remove_pid_sequence), ]
41df71518ba23460194194cb82d9dbb183afcc19
gtlaunch.py
gtlaunch.py
import json import os import subprocess def run(): with open('gtlaunch.json', 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": run()
import argparse import json import os import subprocess def run(args): with open(os.path.expanduser(args.config), 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config', metavar='FILE', help="path to configuration file", default="~/gtlaunch.json", ) args = parser.parse_args() run(args)
Use argparse to locate config file.
Use argparse to locate config file.
Python
mit
GoldenLine/gtlaunch
+ import argparse import json import os import subprocess - def run(): + def run(args): - with open('gtlaunch.json', 'r') as fp: + with open(os.path.expanduser(args.config), 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + '-c', '--config', metavar='FILE', help="path to configuration file", + default="~/gtlaunch.json", + ) + args = parser.parse_args() - run() + run(args)
Use argparse to locate config file.
## Code Before: import json import os import subprocess def run(): with open('gtlaunch.json', 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": run() ## Instruction: Use argparse to locate config file. ## Code After: import argparse import json import os import subprocess def run(args): with open(os.path.expanduser(args.config), 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config', metavar='FILE', help="path to configuration file", default="~/gtlaunch.json", ) args = parser.parse_args() run(args)
+ import argparse import json import os import subprocess - def run(): + def run(args): ? ++++ - with open('gtlaunch.json', 'r') as fp: + with open(os.path.expanduser(args.config), 'r') as fp: config = json.load(fp) project = config['test'] args = ['gnome-terminal', '--maximize'] args.extend(['--working-directory', os.path.expanduser(project['cwd'])]) for idx, tab in enumerate(project['tabs']): tab_option = '--tab' if idx == 0 else '--tab-with-profile=Default' prefix = project.get('prefix', 'true') command = "zsh -is eval '{} && {}'".format(prefix, tab['command']) args.append(tab_option) args.extend(['--title', tab['title']]) args.extend(['--command', command]) return subprocess.Popen(args) if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument( + '-c', '--config', metavar='FILE', help="path to configuration file", + default="~/gtlaunch.json", + ) + args = parser.parse_args() - run() + run(args) ? ++++
f1496f7f5babdf1f9fa8f527f42442aeccab46d7
tests/test_location.py
tests/test_location.py
from SUASSystem import *
import math
import numpy
import unittest

from dronekit import LocationGlobalRelative


class locationTestCase(unittest.TestCase):
    """Unit tests for the SUASSystem ``Location`` container."""

    def setUp(self):
        # Fresh Location(lat, lon, alt) fixture for every test.
        self.position = Location(5, 12, 20)

    def test_get_lat(self):
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(5, self.position.get_lat())
from SUASSystem import *
import math
import numpy
import unittest

from dronekit import LocationGlobalRelative


class locationTestCase(unittest.TestCase):
    """Unit tests for the SUASSystem ``Location`` container."""

    def setUp(self):
        # Fresh Location(lat, lon, alt) fixture for every test.
        self.position = Location(5, 12, 20)

    def test_get_lat(self):
        # assertEqual, not the deprecated assertEquals alias.
        self.assertEqual(5, self.position.get_lat())

    def test_get_lon(self):
        self.assertEqual(12, self.position.get_lon())

    def test_get_alt(self):
        self.assertEqual(20, self.position.get_alt())

    def test_repr(self):
        # repr(x) is the idiomatic spelling of x.__repr__().
        self.assertEqual('Lat: 5 Lon: 12 Alt: 20', repr(self.position))
Add finished working location unit tests
Add finished working location unit tests
Python
mit
FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition,FlintHill/SUAS-Competition
from SUASSystem import * import math import numpy import unittest from dronekit import LocationGlobalRelative class locationTestCase(unittest.TestCase): def setUp(self): self.position = Location(5, 12, 20) def test_get_lat(self): self.assertEquals(5, self.position.get_lat()) + def test_get_lon(self): + self.assertEquals(12, self.position.get_lon()) + + def test_get_alt(self): + self.assertEquals(20, self.position.get_alt()) + + def test_repr(self): + self.assertEquals('Lat: 5 Lon: 12 Alt: 20', self.position.__repr__()) +
Add finished working location unit tests
## Code Before: from SUASSystem import * import math import numpy import unittest from dronekit import LocationGlobalRelative class locationTestCase(unittest.TestCase): def setUp(self): self.position = Location(5, 12, 20) def test_get_lat(self): self.assertEquals(5, self.position.get_lat()) ## Instruction: Add finished working location unit tests ## Code After: from SUASSystem import * import math import numpy import unittest from dronekit import LocationGlobalRelative class locationTestCase(unittest.TestCase): def setUp(self): self.position = Location(5, 12, 20) def test_get_lat(self): self.assertEquals(5, self.position.get_lat()) def test_get_lon(self): self.assertEquals(12, self.position.get_lon()) def test_get_alt(self): self.assertEquals(20, self.position.get_alt()) def test_repr(self): self.assertEquals('Lat: 5 Lon: 12 Alt: 20', self.position.__repr__())
from SUASSystem import * import math import numpy import unittest from dronekit import LocationGlobalRelative class locationTestCase(unittest.TestCase): def setUp(self): self.position = Location(5, 12, 20) def test_get_lat(self): self.assertEquals(5, self.position.get_lat()) + + def test_get_lon(self): + self.assertEquals(12, self.position.get_lon()) + + def test_get_alt(self): + self.assertEquals(20, self.position.get_alt()) + + def test_repr(self): + self.assertEquals('Lat: 5 Lon: 12 Alt: 20', self.position.__repr__())
efbcd8104470234e50ad2e40719b0edf1fbc45c4
zou/app/utils/date_helpers.py
zou/app/utils/date_helpers.py
from datetime import date, timedelta


def get_date_diff(date_a, date_b):
    """Return the absolute distance between two dates/datetimes, in seconds."""
    delta = date_b - date_a
    return abs(delta.total_seconds())


def get_date_from_now(nb_days):
    """Return today's date shifted back by ``nb_days`` days."""
    today = date.today()
    return today - timedelta(days=nb_days)
from babel.dates import format_datetime
from datetime import date, datetime, timedelta


def get_date_from_now(nb_days):
    """Return today's date shifted back by ``nb_days`` days."""
    return date.today() - timedelta(days=nb_days)


def get_date_diff(date_a, date_b):
    """Return the absolute distance between two dates/datetimes, in seconds."""
    return abs((date_b - date_a).total_seconds())


def get_date_string_with_timezone(date_string, timezone):
    """
    Apply given timezone to given date and return it as a string.

    ``date_string`` must be an ISO-like ``%Y-%m-%dT%H:%M:%S`` value;
    ``timezone`` is passed through to babel as ``tzinfo``.
    """
    date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S")
    # Babel uses CLDR/LDML patterns, not strftime: "yyyy" is the calendar
    # year ("YYYY" is the week-based year), "dd" is day-of-month ("DD" is
    # day-of-year), and literal characters such as "T" must be quoted.
    return format_datetime(
        date_obj,
        "yyyy-MM-dd'T'HH:mm:ss",
        tzinfo=timezone
    )
Add helper to handle timezone in date strings
[utils] Add helper to handle timezone in date strings
Python
agpl-3.0
cgwire/zou
+ from babel.dates import format_datetime - from datetime import date, timedelta + from datetime import date, datetime, timedelta def get_date_from_now(nb_days): return date.today() - timedelta(days=nb_days) def get_date_diff(date_a, date_b): return abs((date_b - date_a).total_seconds()) + + def get_date_string_with_timezone(date_string, timezone): + """ + Apply given timezone to given date and return it as a string. + """ + date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S") + return format_datetime( + date_obj, + "YYYY-MM-DDTHH:mm:ss", + tzinfo=timezone + ) +
Add helper to handle timezone in date strings
## Code Before: from datetime import date, timedelta def get_date_from_now(nb_days): return date.today() - timedelta(days=nb_days) def get_date_diff(date_a, date_b): return abs((date_b - date_a).total_seconds()) ## Instruction: Add helper to handle timezone in date strings ## Code After: from babel.dates import format_datetime from datetime import date, datetime, timedelta def get_date_from_now(nb_days): return date.today() - timedelta(days=nb_days) def get_date_diff(date_a, date_b): return abs((date_b - date_a).total_seconds()) def get_date_string_with_timezone(date_string, timezone): """ Apply given timezone to given date and return it as a string. """ date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S") return format_datetime( date_obj, "YYYY-MM-DDTHH:mm:ss", tzinfo=timezone )
+ from babel.dates import format_datetime - from datetime import date, timedelta + from datetime import date, datetime, timedelta ? ++++++++++ def get_date_from_now(nb_days): return date.today() - timedelta(days=nb_days) def get_date_diff(date_a, date_b): return abs((date_b - date_a).total_seconds()) + + + def get_date_string_with_timezone(date_string, timezone): + """ + Apply given timezone to given date and return it as a string. + """ + date_obj = datetime.strptime(date_string, "%Y-%m-%dT%H:%M:%S") + return format_datetime( + date_obj, + "YYYY-MM-DDTHH:mm:ss", + tzinfo=timezone + )
229f8f22a71044dc2c39a52ff36458720958c5b9
cpnest/__init__.py
cpnest/__init__.py
# cpnest package initialiser: exposes the CPNest entry point and declares
# the public submodules.
from .cpnest import CPNest

# Package version string.
__version__ = '0.9.8'

# Submodules exported by ``from cpnest import *``.
__all__ = ['model',
           'NestedSampling',
           'parameter',
           'sampler',
           'cpnest',
           'nest2pos',
           'proposal',
           'plot']
# cpnest package initialiser: installs the custom logger class and exposes
# the CPNest entry point.
import logging

from .logger import CPNestLogger
from .cpnest import CPNest

# Every logger created after this call uses CPNestLogger.
# NOTE(review): this runs after the .cpnest import above, so loggers created
# during that import presumably keep the default class -- confirm.
logging.setLoggerClass(CPNestLogger)

# Package version string.
__version__ = '0.9.8'

# Submodules exported by ``from cpnest import *``.
__all__ = ['model',
           'NestedSampling',
           'parameter',
           'sampler',
           'cpnest',
           'nest2pos',
           'proposal',
           'plot',
           'logger']
Set logger class in init
Set logger class in init
Python
mit
johnveitch/cpnest
+ import logging + from .logger import CPNestLogger from .cpnest import CPNest + + logging.setLoggerClass(CPNestLogger) __version__ = '0.9.8' __all__ = ['model', 'NestedSampling', 'parameter', 'sampler', 'cpnest', 'nest2pos', 'proposal', - 'plot'] + 'plot', + 'logger'] +
Set logger class in init
## Code Before: from .cpnest import CPNest __version__ = '0.9.8' __all__ = ['model', 'NestedSampling', 'parameter', 'sampler', 'cpnest', 'nest2pos', 'proposal', 'plot'] ## Instruction: Set logger class in init ## Code After: import logging from .logger import CPNestLogger from .cpnest import CPNest logging.setLoggerClass(CPNestLogger) __version__ = '0.9.8' __all__ = ['model', 'NestedSampling', 'parameter', 'sampler', 'cpnest', 'nest2pos', 'proposal', 'plot', 'logger']
+ import logging + from .logger import CPNestLogger from .cpnest import CPNest + + logging.setLoggerClass(CPNestLogger) __version__ = '0.9.8' __all__ = ['model', 'NestedSampling', 'parameter', 'sampler', 'cpnest', 'nest2pos', 'proposal', - 'plot'] ? ^ + 'plot', ? ^ + 'logger'] +
46b00107e90df8f34a9cce5c4b010fdfb88f5f52
shovel/code.py
shovel/code.py
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'])
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'], known_third_party=['six'])
Add 'six' to known_third_party for SortImports
Add 'six' to known_third_party for SortImports six was being sorted incorrectly due to being classed as first party.
Python
mit
python-astrodynamics/astrodynamics,python-astrodynamics/astrodynamics
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, - skip=['__init__.py']) + skip=['__init__.py'], + known_third_party=['six'])
Add 'six' to known_third_party for SortImports
## Code Before: from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py']) ## Instruction: Add 'six' to known_third_party for SortImports ## Code After: from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, skip=['__init__.py'], known_third_party=['six'])
from __future__ import absolute_import, division, print_function from pathlib import Path from isort import SortImports from shovel import task # isort multi_line_output modes GRID = 0 VERTICAL = 1 HANGING_INDENT = 2 VERTICAL_HANGING_INDENT = 3 HANGING_GRID = 4 HANGING_GRID_GROUPED = 5 @task def format_imports(): """Sort imports into a consistent style.""" astrodynamics_dir = Path('astrodynamics') constants_dir = astrodynamics_dir / 'constants' for initfile in astrodynamics_dir.glob('**/__init__.py'): if constants_dir in initfile.parents: continue SortImports(str(initfile), multi_line_output=VERTICAL_HANGING_INDENT, not_skip=['__init__.py']) # Exclude __init__.py # Exclude generated constants/ python files for pyfile in astrodynamics_dir.glob('**/*.py'): if constants_dir in pyfile.parents and pyfile.stem != 'constant': continue SortImports(str(pyfile), multi_line_output=HANGING_GRID, - skip=['__init__.py']) ? ^ + skip=['__init__.py'], ? ^ + known_third_party=['six'])
dd68fbb86100d0d3da08172505e7c564cc5bd3e7
monitor-notifier-slack.py
monitor-notifier-slack.py
import pika
import json
import requests
import os  # required: os.environ is read below (was missing -> NameError)

# Configuration comes from the environment.
SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"]
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]

# Blocking connection to the RabbitMQ broker.
connection = pika.BlockingConnection(pika.ConnectionParameters(
    RABBIT_MQ_SERVER))
channel = connection.channel()

channel.queue_declare(queue='slack')

def callback(ch, method, properties, body):
    """Forward each consumed message body to the Slack webhook as its text."""
    payload = {}
    # NOTE(review): pika normally delivers `body` as bytes; json.dumps raises
    # TypeError on a bytes value -- confirm and decode if needed.
    payload["text"] = body
    r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload))

# no_ack=True: messages are not re-delivered if this process dies mid-post.
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
import pika
import json
import requests
import os

# Broker configuration comes from the environment.
RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"]
RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"]
RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"]

# Authenticated blocking connection to the RabbitMQ broker.
credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD)
connection = pika.BlockingConnection(pika.ConnectionParameters(
    RABBIT_MQ_SERVER, credentials = credentials))
channel = connection.channel()

# Queue declaration disabled -- presumably declared elsewhere; confirm
# before re-enabling.
# channel.queue_declare(queue='slack')

def callback(ch, method, properties, body):
    # Forward each consumed message to the webhook URL carried inside the
    # message itself: monitor.notifier.arguments is a JSON string containing
    # a "webhook_url" key.
    payload = {}
    # NOTE(review): pika usually delivers `body` as bytes; json.dumps raises
    # TypeError on a bytes value -- confirm and decode if needed.
    payload["text"] = body
    req = json.loads(body)
    webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"]
    # Response is not checked; a failed post is silently ignored.
    r = requests.post(webhook_url, data = json.dumps(payload))

# no_ack=True: messages are not re-delivered if this process dies mid-post.
channel.basic_consume(callback, queue='slack', no_ack=True)
channel.start_consuming()
Add credentials + read webhook_url from notifier arguments
Add credentials + read webhook_url from notifier arguments
Python
mit
observer-hackaton/monitor-notifier-slack
import pika import json import requests + import os - SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] + RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] + RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] + + credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( - RABBIT_MQ_SERVER)) + RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() - channel.queue_declare(queue='slack') + # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body + req = json.loads(body) + webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] - r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) + r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
Add credentials + read webhook_url from notifier arguments
## Code Before: import pika import json import requests SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER)) channel = connection.channel() channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming() ## Instruction: Add credentials + read webhook_url from notifier arguments ## Code After: import pika import json import requests import os RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() # channel.queue_declare(queue='slack') def callback(ch, method, properties, body): payload = {} payload["text"] = body req = json.loads(body) webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] r = requests.post(webhook_url, data = json.dumps(payload)) channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
import pika import json import requests + import os - SLACK_WEBHOOK_URL = os.environ["SLACK_WEBHOOK_URL"] RABBIT_MQ_SERVER = os.environ["RABBIT_MQ_SERVER"] + RABBIT_MQ_USER = os.environ["RABBIT_MQ_USER"] + RABBIT_MQ_PWD = os.environ["RABBIT_MQ_PWD"] + + credentials = pika.PlainCredentials(RABBIT_MQ_USER, RABBIT_MQ_PWD) connection = pika.BlockingConnection(pika.ConnectionParameters( - RABBIT_MQ_SERVER)) + RABBIT_MQ_SERVER, credentials = credentials)) channel = connection.channel() - channel.queue_declare(queue='slack') + # channel.queue_declare(queue='slack') ? ++ def callback(ch, method, properties, body): payload = {} payload["text"] = body + req = json.loads(body) + webhook_url = json.loads(req["monitor"]["notifier"]["arguments"])["webhook_url"] - r = requests.post(SLACK_WEBHOOK_URL, data = json.dumps(payload)) ? ^^^^^ ^^^^^^^^^^^ + r = requests.post(webhook_url, data = json.dumps(payload)) ? ^^^^^^^ ^^^ channel.basic_consume(callback, queue='slack', no_ack=True) channel.start_consuming()
e5f662d9cebe4133705eca74a300c325d432ad04
anvil/components/cinder_client.py
anvil/components/cinder_client.py
from anvil import components as comp


class CinderClientUninstaller(comp.PythonUninstallComponent):
    # Uninstall handler: defers entirely to the generic python uninstall.
    def __init__(self, *args, **kargs):
        comp.PythonUninstallComponent.__init__(self, *args, **kargs)


class CinderClientInstaller(comp.PythonInstallComponent):
    # Install handler for the python-cinderclient component.
    def __init__(self, *args, **kargs):
        comp.PythonInstallComponent.__init__(self, *args, **kargs)

    def _filter_pip_requires_line(self, line):
        # Drop requirement lines mentioning sibling clients (case-insensitive
        # substring match); returning None excludes the line, any other
        # return value keeps it verbatim.
        if line.lower().find('keystoneclient') != -1:
            return None
        if line.lower().find('novaclient') != -1:
            return None
        if line.lower().find('glanceclient') != -1:
            return None
        return line


class CinderClientRuntime(comp.EmptyRuntime):
    # Presumably nothing to start or stop for a client library.
    def __init__(self, *args, **kargs):
        comp.EmptyRuntime.__init__(self, *args, **kargs)
from anvil import components as comp


class CinderClientUninstaller(comp.PythonUninstallComponent):
    # Uninstall handler: defers entirely to the generic python uninstall.
    def __init__(self, *args, **kargs):
        comp.PythonUninstallComponent.__init__(self, *args, **kargs)


class CinderClientInstaller(comp.PythonInstallComponent):
    # Install handler: no cinderclient-specific pip requirement filtering.
    def __init__(self, *args, **kargs):
        comp.PythonInstallComponent.__init__(self, *args, **kargs)


class CinderClientRuntime(comp.EmptyRuntime):
    # Presumably nothing to start or stop for a client library.
    def __init__(self, *args, **kargs):
        comp.EmptyRuntime.__init__(self, *args, **kargs)
Remove destruction of pips/test requires entries that don't exist.
Remove destruction of pips/test requires entries that don't exist.
Python
apache-2.0
stackforge/anvil,stackforge/anvil,mc2014/anvil,mc2014/anvil
from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) - def _filter_pip_requires_line(self, line): - if line.lower().find('keystoneclient') != -1: - return None - if line.lower().find('novaclient') != -1: - return None - if line.lower().find('glanceclient') != -1: - return None - return line - class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
Remove destruction of pips/test requires entries that don't exist.
## Code Before: from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) def _filter_pip_requires_line(self, line): if line.lower().find('keystoneclient') != -1: return None if line.lower().find('novaclient') != -1: return None if line.lower().find('glanceclient') != -1: return None return line class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs) ## Instruction: Remove destruction of pips/test requires entries that don't exist. ## Code After: from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) - def _filter_pip_requires_line(self, line): - if line.lower().find('keystoneclient') != -1: - return None - if line.lower().find('novaclient') != -1: - return None - if line.lower().find('glanceclient') != -1: - return None - return line - class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
7f86ab26fb1c6ba01f81fdc3f5b66a0f079c23ff
tests/test_app.py
tests/test_app.py
import asyncio
from unittest import mock

import aiohttp
import pytest

from bottery.app import App


def test_app_session():
    # The lazily-created session must be an aiohttp ClientSession.
    app = App()
    assert isinstance(app.session, aiohttp.ClientSession)


def test_app_already_configured_session():
    # A pre-set private _session is returned unchanged by the property.
    app = App()
    app._session = 'session'
    assert app.session == 'session'


def test_app_loop():
    # The lazily-created loop must be an asyncio event loop.
    app = App()
    assert isinstance(app.loop, asyncio.AbstractEventLoop)


def test_app_already_configured_loop():
    # A pre-set private _loop is returned unchanged by the property.
    app = App()
    app._loop = 'loop'
    assert app.loop == 'loop'


@mock.patch('bottery.app.settings')
def test_app_configure_without_platforms(mocked_settings):
    """Should raise Exception if no platform was found at settings"""
    mocked_settings.PLATFORMS = {}

    app = App()
    with pytest.raises(Exception):
        app.configure_platforms()
import asyncio
import sys
from unittest import mock

import aiohttp
import pytest

from bottery.app import App


@pytest.fixture
def mocked_engine():
    # Registers a MagicMock module under 'tests.fake_engine' so that
    # App.configure_platforms can import it as a platform engine; the
    # fake entry is removed from sys.modules on teardown.
    mocked_engine_module = mock.MagicMock()
    mocked_engine_instance = mocked_engine_module.engine.return_value
    # tasks() must yield an iterable of task tuples.
    mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )]
    sys.modules['tests.fake_engine'] = mocked_engine_module

    yield {
        'module': mocked_engine_module,
        'instance': mocked_engine_instance
    }

    del sys.modules['tests.fake_engine']


def test_app_session():
    # The lazily-created session must be an aiohttp ClientSession.
    app = App()
    assert isinstance(app.session, aiohttp.ClientSession)


def test_app_already_configured_session():
    # A pre-set private _session is returned unchanged by the property.
    app = App()
    app._session = 'session'
    assert app.session == 'session'


def test_app_loop():
    # The lazily-created loop must be an asyncio event loop.
    app = App()
    assert isinstance(app.loop, asyncio.AbstractEventLoop)


def test_app_already_configured_loop():
    # A pre-set private _loop is returned unchanged by the property.
    app = App()
    app._loop = 'loop'
    assert app.loop == 'loop'


@mock.patch('bottery.app.settings')
def test_app_configure_without_platforms(mocked_settings):
    """Should raise Exception if no platform was found at settings"""
    mocked_settings.PLATFORMS = {}

    app = App()
    with pytest.raises(Exception):
        app.configure_platforms()


@mock.patch('bottery.app.settings')
def test_app_configure_with_platforms(mocked_settings, mocked_engine):
    """Should call the platform interface methods"""

    mocked_settings.PLATFORMS = {
        'test': {
            'ENGINE': 'tests.fake_engine',
            'OPTIONS': {
                'token': 'should-be-a-valid-token'
            }
        }
    }

    app = App()
    app.configure_platforms()

    # The engine factory receives the app session plus the OPTIONS kwargs,
    # and the resulting instance is configured and asked for its tasks.
    mocked_engine['module'].engine.assert_called_with(
        session=app.session,
        token='should-be-a-valid-token'
    )
    mocked_engine['instance'].configure.assert_called_with()
    mocked_engine['instance'].tasks.assert_called_with()
Increase the code coverage of App.configure_platforms method
Increase the code coverage of App.configure_platforms method
Python
mit
rougeth/bottery
import asyncio + import sys from unittest import mock import aiohttp import pytest from bottery.app import App + + + @pytest.fixture + def mocked_engine(): + mocked_engine_module = mock.MagicMock() + mocked_engine_instance = mocked_engine_module.engine.return_value + mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )] + sys.modules['tests.fake_engine'] = mocked_engine_module + + yield { + 'module': mocked_engine_module, + 'instance': mocked_engine_instance + } + + del sys.modules['tests.fake_engine'] def test_app_session(): app = App() assert isinstance(app.session, aiohttp.ClientSession) def test_app_already_configured_session(): app = App() app._session = 'session' assert app.session == 'session' def test_app_loop(): app = App() assert isinstance(app.loop, asyncio.AbstractEventLoop) def test_app_already_configured_loop(): app = App() app._loop = 'loop' assert app.loop == 'loop' @mock.patch('bottery.app.settings') def test_app_configure_without_platforms(mocked_settings): """Should raise Exception if no platform was found at settings""" mocked_settings.PLATFORMS = {} app = App() with pytest.raises(Exception): app.configure_platforms() + @mock.patch('bottery.app.settings') + def test_app_configure_with_platforms(mocked_settings, mocked_engine): + """Should call the platform interface methods""" + + mocked_settings.PLATFORMS = { + 'test': { + 'ENGINE': 'tests.fake_engine', + 'OPTIONS': { + 'token': 'should-be-a-valid-token' + } + } + } + + app = App() + app.configure_platforms() + + mocked_engine['module'].engine.assert_called_with( + session=app.session, + token='should-be-a-valid-token' + ) + mocked_engine['instance'].configure.assert_called_with() + mocked_engine['instance'].tasks.assert_called_with() +
Increase the code coverage of App.configure_platforms method
## Code Before: import asyncio from unittest import mock import aiohttp import pytest from bottery.app import App def test_app_session(): app = App() assert isinstance(app.session, aiohttp.ClientSession) def test_app_already_configured_session(): app = App() app._session = 'session' assert app.session == 'session' def test_app_loop(): app = App() assert isinstance(app.loop, asyncio.AbstractEventLoop) def test_app_already_configured_loop(): app = App() app._loop = 'loop' assert app.loop == 'loop' @mock.patch('bottery.app.settings') def test_app_configure_without_platforms(mocked_settings): """Should raise Exception if no platform was found at settings""" mocked_settings.PLATFORMS = {} app = App() with pytest.raises(Exception): app.configure_platforms() ## Instruction: Increase the code coverage of App.configure_platforms method ## Code After: import asyncio import sys from unittest import mock import aiohttp import pytest from bottery.app import App @pytest.fixture def mocked_engine(): mocked_engine_module = mock.MagicMock() mocked_engine_instance = mocked_engine_module.engine.return_value mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )] sys.modules['tests.fake_engine'] = mocked_engine_module yield { 'module': mocked_engine_module, 'instance': mocked_engine_instance } del sys.modules['tests.fake_engine'] def test_app_session(): app = App() assert isinstance(app.session, aiohttp.ClientSession) def test_app_already_configured_session(): app = App() app._session = 'session' assert app.session == 'session' def test_app_loop(): app = App() assert isinstance(app.loop, asyncio.AbstractEventLoop) def test_app_already_configured_loop(): app = App() app._loop = 'loop' assert app.loop == 'loop' @mock.patch('bottery.app.settings') def test_app_configure_without_platforms(mocked_settings): """Should raise Exception if no platform was found at settings""" mocked_settings.PLATFORMS = {} app = App() with pytest.raises(Exception): app.configure_platforms() 
@mock.patch('bottery.app.settings') def test_app_configure_with_platforms(mocked_settings, mocked_engine): """Should call the platform interface methods""" mocked_settings.PLATFORMS = { 'test': { 'ENGINE': 'tests.fake_engine', 'OPTIONS': { 'token': 'should-be-a-valid-token' } } } app = App() app.configure_platforms() mocked_engine['module'].engine.assert_called_with( session=app.session, token='should-be-a-valid-token' ) mocked_engine['instance'].configure.assert_called_with() mocked_engine['instance'].tasks.assert_called_with()
import asyncio + import sys from unittest import mock import aiohttp import pytest from bottery.app import App + + + @pytest.fixture + def mocked_engine(): + mocked_engine_module = mock.MagicMock() + mocked_engine_instance = mocked_engine_module.engine.return_value + mocked_engine_instance.tasks.return_value = [(mock.MagicMock(), )] + sys.modules['tests.fake_engine'] = mocked_engine_module + + yield { + 'module': mocked_engine_module, + 'instance': mocked_engine_instance + } + + del sys.modules['tests.fake_engine'] def test_app_session(): app = App() assert isinstance(app.session, aiohttp.ClientSession) def test_app_already_configured_session(): app = App() app._session = 'session' assert app.session == 'session' def test_app_loop(): app = App() assert isinstance(app.loop, asyncio.AbstractEventLoop) def test_app_already_configured_loop(): app = App() app._loop = 'loop' assert app.loop == 'loop' @mock.patch('bottery.app.settings') def test_app_configure_without_platforms(mocked_settings): """Should raise Exception if no platform was found at settings""" mocked_settings.PLATFORMS = {} app = App() with pytest.raises(Exception): app.configure_platforms() + + @mock.patch('bottery.app.settings') + def test_app_configure_with_platforms(mocked_settings, mocked_engine): + """Should call the platform interface methods""" + + mocked_settings.PLATFORMS = { + 'test': { + 'ENGINE': 'tests.fake_engine', + 'OPTIONS': { + 'token': 'should-be-a-valid-token' + } + } + } + + app = App() + app.configure_platforms() + + mocked_engine['module'].engine.assert_called_with( + session=app.session, + token='should-be-a-valid-token' + ) + mocked_engine['instance'].configure.assert_called_with() + mocked_engine['instance'].tasks.assert_called_with()
f41b06ca9a61b75bdb6cef0a0c534755ca80a513
tests/unit/test_pathologic_models.py
tests/unit/test_pathologic_models.py
from __future__ import unicode_literals
import pytest

from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch


def test_optional_inside_zeroormore():
    """
    Test optional match inside a zero or more.
    Optional should always succeed thus inducing ZeroOrMore
    to try the match again.
    Arpeggio handle this using soft failures.
    """
    # Pathological grammar: the inner Optional never fails, so a naive
    # ZeroOrMore implementation would loop forever without consuming input.
    def grammar():     return ZeroOrMore(Optional('a'))

    parser = ParserPython(grammar)

    with pytest.raises(NoMatch):
        # This could lead to infinite loop
        parser.parse('b')
from __future__ import unicode_literals import pytest from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF def test_optional_inside_zeroormore(): """ Test optional match inside a zero or more. Optional should always succeed thus inducing ZeroOrMore to try the match again. Arpeggio handle this case. """ def grammar(): return ZeroOrMore(Optional('a')), EOF parser = ParserPython(grammar) with pytest.raises(NoMatch): # This could lead to infinite loop parser.parse('b')
Fix in test for pathologic grammars.
Fix in test for pathologic grammars.
Python
mit
leiyangyou/Arpeggio,leiyangyou/Arpeggio
from __future__ import unicode_literals import pytest - from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch + from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF def test_optional_inside_zeroormore(): """ Test optional match inside a zero or more. Optional should always succeed thus inducing ZeroOrMore to try the match again. - Arpeggio handle this using soft failures. + Arpeggio handle this case. """ - def grammar(): return ZeroOrMore(Optional('a')) + def grammar(): return ZeroOrMore(Optional('a')), EOF parser = ParserPython(grammar) with pytest.raises(NoMatch): # This could lead to infinite loop parser.parse('b')
Fix in test for pathologic grammars.
## Code Before: from __future__ import unicode_literals import pytest from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch def test_optional_inside_zeroormore(): """ Test optional match inside a zero or more. Optional should always succeed thus inducing ZeroOrMore to try the match again. Arpeggio handle this using soft failures. """ def grammar(): return ZeroOrMore(Optional('a')) parser = ParserPython(grammar) with pytest.raises(NoMatch): # This could lead to infinite loop parser.parse('b') ## Instruction: Fix in test for pathologic grammars. ## Code After: from __future__ import unicode_literals import pytest from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF def test_optional_inside_zeroormore(): """ Test optional match inside a zero or more. Optional should always succeed thus inducing ZeroOrMore to try the match again. Arpeggio handle this case. """ def grammar(): return ZeroOrMore(Optional('a')), EOF parser = ParserPython(grammar) with pytest.raises(NoMatch): # This could lead to infinite loop parser.parse('b')
from __future__ import unicode_literals import pytest - from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch + from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF ? +++++ def test_optional_inside_zeroormore(): """ Test optional match inside a zero or more. Optional should always succeed thus inducing ZeroOrMore to try the match again. - Arpeggio handle this using soft failures. + Arpeggio handle this case. """ - def grammar(): return ZeroOrMore(Optional('a')) + def grammar(): return ZeroOrMore(Optional('a')), EOF ? +++++ parser = ParserPython(grammar) with pytest.raises(NoMatch): # This could lead to infinite loop parser.parse('b')
92138f23dfc5dbbcb81aeb1f429e68a63a9d5005
apps/organizations/admin.py
apps/organizations/admin.py
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember)
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) class OrganizationMemberAdmin(admin.ModelAdmin): list_display = ('user', 'function', 'organization') list_filter = ('function',) search_fields = ('user__first_name', 'user__last_name', 'user__username', 'organization__name') admin.site.register(OrganizationMember, OrganizationMemberAdmin)
Add a custom Admin page for organization members.
Add a custom Admin page for organization members. This is a partial fix for BB-66.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') - admin.site.register(Organization, OrganizationAdmin) - admin.site.register(OrganizationMember) + class OrganizationMemberAdmin(admin.ModelAdmin): + list_display = ('user', 'function', 'organization') + list_filter = ('function',) + search_fields = ('user__first_name', 'user__last_name', + 'user__username', 'organization__name') + + admin.site.register(OrganizationMember, OrganizationMemberAdmin)
Add a custom Admin page for organization members.
## Code Before: from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) admin.site.register(OrganizationMember) ## Instruction: Add a custom Admin page for organization members. ## Code After: from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') admin.site.register(Organization, OrganizationAdmin) class OrganizationMemberAdmin(admin.ModelAdmin): list_display = ('user', 'function', 'organization') list_filter = ('function',) search_fields = ('user__first_name', 'user__last_name', 'user__username', 'organization__name') admin.site.register(OrganizationMember, OrganizationMemberAdmin)
from django.contrib import admin from apps.organizations.models import ( Organization, OrganizationAddress, OrganizationMember ) class OrganizationAddressAdmin(admin.StackedInline): model = OrganizationAddress extra = 0 class OrganizationAdmin(admin.ModelAdmin): prepopulated_fields = {"slug": ("name",)} inlines = (OrganizationAddressAdmin,) search_fields = ('name', 'description') - admin.site.register(Organization, OrganizationAdmin) + + class OrganizationMemberAdmin(admin.ModelAdmin): + list_display = ('user', 'function', 'organization') + list_filter = ('function',) + search_fields = ('user__first_name', 'user__last_name', + 'user__username', 'organization__name') + - admin.site.register(OrganizationMember) + admin.site.register(OrganizationMember, OrganizationMemberAdmin) ? +++++++++++++++++++++++++
80c192155256aa02f290130f792fc804fb59a4d7
pycat/talk.py
pycat/talk.py
"""Communication link driver.""" import sys import selectors CLIENT_TO_SERVER = object() SERVER_TO_CLIENT = object() def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 with selectors.DefaultSelector() as selector: selector.register(source, selectors.EVENT_READ, CLIENT_TO_SERVER) selector.register(socket, selectors.EVENT_READ, SERVER_TO_CLIENT) while True: for key, events in selector.select(): if key.data is CLIENT_TO_SERVER: data = source.readline() socket.send(data) elif key.data is SERVER_TO_CLIENT: data = socket.recv(OUTPUT_BUFFER_SIZE) sink.write(data) sink.flush()
"""Communication link driver.""" import sys import select def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 while True: readable, writable, exceptional = select.select((socket, source), (), (socket, source, sink)) if source in readable: socket.send(source.readline()) if socket in readable: sink.write(socket.recv(OUTPUT_BUFFER_SIZE)) sink.flush()
Switch to just using `select` directly
Switch to just using `select` directly This is less efficient, but does let use get the "exceptional" cases and handle them more pleasantly.
Python
mit
prophile/pycat
"""Communication link driver.""" import sys - import selectors + import select - - - CLIENT_TO_SERVER = object() - SERVER_TO_CLIENT = object() def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 - with selectors.DefaultSelector() as selector: - selector.register(source, selectors.EVENT_READ, CLIENT_TO_SERVER) - selector.register(socket, selectors.EVENT_READ, SERVER_TO_CLIENT) - while True: + while True: - for key, events in selector.select(): - if key.data is CLIENT_TO_SERVER: - data = source.readline() - socket.send(data) - elif key.data is SERVER_TO_CLIENT: + readable, writable, exceptional = select.select((socket, source), + (), + (socket, source, sink)) + if source in readable: + socket.send(source.readline()) + if socket in readable: - data = socket.recv(OUTPUT_BUFFER_SIZE) + sink.write(socket.recv(OUTPUT_BUFFER_SIZE)) - sink.write(data) - sink.flush() + sink.flush()
Switch to just using `select` directly
## Code Before: """Communication link driver.""" import sys import selectors CLIENT_TO_SERVER = object() SERVER_TO_CLIENT = object() def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 with selectors.DefaultSelector() as selector: selector.register(source, selectors.EVENT_READ, CLIENT_TO_SERVER) selector.register(socket, selectors.EVENT_READ, SERVER_TO_CLIENT) while True: for key, events in selector.select(): if key.data is CLIENT_TO_SERVER: data = source.readline() socket.send(data) elif key.data is SERVER_TO_CLIENT: data = socket.recv(OUTPUT_BUFFER_SIZE) sink.write(data) sink.flush() ## Instruction: Switch to just using `select` directly ## Code After: """Communication link driver.""" import sys import select def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 while True: readable, writable, exceptional = select.select((socket, source), (), (socket, source, sink)) if source in readable: socket.send(source.readline()) if socket in readable: sink.write(socket.recv(OUTPUT_BUFFER_SIZE)) sink.flush()
"""Communication link driver.""" import sys - import selectors ? --- + import select - - - CLIENT_TO_SERVER = object() - SERVER_TO_CLIENT = object() def talk(socket, source=sys.stdin.buffer, sink=sys.stdout.buffer): """Run communication, in a loop. Input from `source` is sent on `socket`, and data received on `socket` is forwarded to `sink`. All file descriptors must be non-blocking. """ OUTPUT_BUFFER_SIZE = 1024 - with selectors.DefaultSelector() as selector: - selector.register(source, selectors.EVENT_READ, CLIENT_TO_SERVER) - selector.register(socket, selectors.EVENT_READ, SERVER_TO_CLIENT) - while True: ? ---- + while True: - for key, events in selector.select(): - if key.data is CLIENT_TO_SERVER: - data = source.readline() - socket.send(data) - elif key.data is SERVER_TO_CLIENT: + readable, writable, exceptional = select.select((socket, source), + (), + (socket, source, sink)) + if source in readable: + socket.send(source.readline()) + if socket in readable: - data = socket.recv(OUTPUT_BUFFER_SIZE) ? ^^^^^^^^^^ ^^^^ + sink.write(socket.recv(OUTPUT_BUFFER_SIZE)) ? ^^^^^^^^ ^^ + - sink.write(data) - sink.flush() ? -------- + sink.flush()
adb265a57baed6a94f83ba13f88342313ad78566
tests/adapter.py
tests/adapter.py
class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ def store_entry(self, entry): """Mock store_entry""" pass def store_response(self, response): """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass def get_entry_responses(self, entry_id): """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass
class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ def store_entry(self, entry: dict): """Mock store_entry""" pass def store_response(self, response: dict): """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass def get_entry_responses(self, entry_id: str): """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass
Add type annotations to MockStorageAdapter methods
Add type annotations to MockStorageAdapter methods
Python
mit
tjmcginnis/tmj
class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ - def store_entry(self, entry): + def store_entry(self, entry: dict): """Mock store_entry""" pass - def store_response(self, response): + def store_response(self, response: dict): """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass - def get_entry_responses(self, entry_id): + def get_entry_responses(self, entry_id: str): """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass
Add type annotations to MockStorageAdapter methods
## Code Before: class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ def store_entry(self, entry): """Mock store_entry""" pass def store_response(self, response): """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass def get_entry_responses(self, entry_id): """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass ## Instruction: Add type annotations to MockStorageAdapter methods ## Code After: class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ def store_entry(self, entry: dict): """Mock store_entry""" pass def store_response(self, response: dict): """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass def get_entry_responses(self, entry_id: str): """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass
class MockStorageAdapter: """Mock storage adapter class. Will be patched for testing purposes """ - def store_entry(self, entry): + def store_entry(self, entry: dict): ? ++++++ """Mock store_entry""" pass - def store_response(self, response): + def store_response(self, response: dict): ? ++++++ """Mock store_response""" pass def get_all_entries(self): """Mock get_all_entries""" pass - def get_entry_responses(self, entry_id): + def get_entry_responses(self, entry_id: str): ? +++++ """Mock get_entry_responses""" pass def get_last_entry(self): """Mock get_last_entry""" pass def get_prompts(self): """Mock get_prompts""" pass
f5d9fbf618f44e8572344e04e9a09c7cae3302bb
neurodsp/plts/__init__.py
neurodsp/plts/__init__.py
"""Plotting functions.""" from .time_series import plot_time_series, plot_bursts from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist
"""Plotting functions.""" from .time_series import plot_time_series, plot_bursts, plot_instantaneous_measure from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist
Make plot_instantaneous_measure accessible from root of plots
Make plot_instantaneous_measure accessible from root of plots
Python
apache-2.0
voytekresearch/neurodsp
"""Plotting functions.""" - from .time_series import plot_time_series, plot_bursts + from .time_series import plot_time_series, plot_bursts, plot_instantaneous_measure from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist
Make plot_instantaneous_measure accessible from root of plots
## Code Before: """Plotting functions.""" from .time_series import plot_time_series, plot_bursts from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist ## Instruction: Make plot_instantaneous_measure accessible from root of plots ## Code After: """Plotting functions.""" from .time_series import plot_time_series, plot_bursts, plot_instantaneous_measure from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist
"""Plotting functions.""" - from .time_series import plot_time_series, plot_bursts + from .time_series import plot_time_series, plot_bursts, plot_instantaneous_measure ? ++++++++++++++++++++++++++++ from .filt import plot_filter_properties, plot_frequency_response, plot_impulse_response from .rhythm import plot_swm_pattern, plot_lagged_coherence from .spectral import plot_power_spectra, plot_scv, plot_scv_rs_lines, plot_scv_rs_matrix, plot_spectral_hist
986901c9e91d44758200fb8d3264b88c0977be37
lvsr/configs/timit_bothgru_hybrid2.py
lvsr/configs/timit_bothgru_hybrid2.py
Config( net=Config(attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "weights_init", "IsotropicGaussian(0.01)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl"))
Config( net=Config(dec_transition='GatedRecurrent', enc_transition='GatedRecurrent', attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "weights_init", "IsotropicGaussian(0.001)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl"))
Fix hybrid2, but it is still no use
Fix hybrid2, but it is still no use
Python
mit
nke001/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr,nke001/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr,rizar/attention-lvcsr,rizar/attention-lvcsr,nke001/attention-lvcsr
Config( - net=Config(attention_type='hybrid2', + net=Config(dec_transition='GatedRecurrent', + enc_transition='GatedRecurrent', + attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", - "weights_init", "IsotropicGaussian(0.01)"), + "weights_init", "IsotropicGaussian(0.001)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl"))
Fix hybrid2, but it is still no use
## Code Before: Config( net=Config(attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "weights_init", "IsotropicGaussian(0.01)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl")) ## Instruction: Fix hybrid2, but it is still no use ## Code After: Config( net=Config(dec_transition='GatedRecurrent', enc_transition='GatedRecurrent', attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "weights_init", "IsotropicGaussian(0.001)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl"))
Config( - net=Config(attention_type='hybrid2', + net=Config(dec_transition='GatedRecurrent', + enc_transition='GatedRecurrent', + attention_type='hybrid2', shift_predictor_dims=[100], max_left=10, max_right=100), initialization=[ ("/recognizer", "rec_weights_init", "IsotropicGaussian(0.1)"), ("/recognizer/generator/att_trans/hybrid_att/loc_att", - "weights_init", "IsotropicGaussian(0.01)"), + "weights_init", "IsotropicGaussian(0.001)"), ? + ("/recognizer/generator/att_trans/hybrid_att/loc_att", "biases_init", "IsotropicGaussian(5.0)")], data=Config(normalization="norm.pkl"))
06d2bb81d19ba3089bddeb77e7e85482b5f0596b
cms/djangoapps/contentstore/management/commands/export_all_courses.py
cms/djangoapps/contentstore/management/commands/export_all_courses.py
from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course id = {0} to {1}".format(course_id, output_path)) if 1: try: course_dir = course_id.replace('/', '...') export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err)
from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course id = {0} to {1}".format(course_id, output_path)) if 1: try: course_dir = course_id.to_deprecated_string().replace('/', '...') export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err)
Fix course id separator at export all courses command
Fix course id separator at export all courses command
Python
agpl-3.0
morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform,morenopc/edx-platform
from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course id = {0} to {1}".format(course_id, output_path)) if 1: try: - course_dir = course_id.replace('/', '...') + course_dir = course_id.to_deprecated_string().replace('/', '...') export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err)
Fix course id separator at export all courses command
## Code Before: from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course id = {0} to {1}".format(course_id, output_path)) if 1: try: course_dir = course_id.replace('/', '...') export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err) ## Instruction: Fix course id separator at export all courses command ## Code After: from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course 
id = {0} to {1}".format(course_id, output_path)) if 1: try: course_dir = course_id.to_deprecated_string().replace('/', '...') export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err)
from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_to_xml from xmodule.modulestore.django import modulestore from xmodule.contentstore.django import contentstore class Command(BaseCommand): """Export all courses from mongo to the specified data directory""" help = 'Export all courses from mongo to the specified data directory' def handle(self, *args, **options): "Execute the command" if len(args) != 1: raise CommandError("export requires one argument: <output path>") output_path = args[0] cs = contentstore() ms = modulestore('direct') root_dir = output_path courses = ms.get_courses() print("%d courses to export:" % len(courses)) cids = [x.id for x in courses] print(cids) for course_id in cids: print("-"*77) print("Exporting course id = {0} to {1}".format(course_id, output_path)) if 1: try: - course_dir = course_id.replace('/', '...') + course_dir = course_id.to_deprecated_string().replace('/', '...') ? +++++++++++++++++++++++ export_to_xml(ms, cs, course_id, root_dir, course_dir, modulestore()) except Exception as err: print("="*30 + "> Oops, failed to export %s" % course_id) print("Error:") print(err)
a28b2bc45b69503a8133b0df98ffa96d9aa4e229
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
helusers/migrations/0002_add_oidcbackchannellogoutevent.py
from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "unique_together": {("iss", "sub", "sid")}, }, ), ]
from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "verbose_name": "OIDC back channel logout event", "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ), ]
Modify migration file to include meta data changes
Modify migration file to include meta data changes The OIDCBackChannelLogoutEvent model's meta data was changed in commit f62a72b29f. Although this has no effect on the database, Django still wants to include the meta data in migrations. Since this migration file isn't yet included in any release, it can be modified, instead of creating a new migration file only for the meta data change.
Python
bsd-2-clause
City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers
- from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ + "verbose_name": "OIDC back channel logout event", + "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ), ]
Modify migration file to include meta data changes
## Code Before: from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "unique_together": {("iss", "sub", "sid")}, }, ), ] ## Instruction: Modify migration file to include meta data changes ## Code After: from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ "verbose_name": "OIDC back channel logout event", "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ), ]
- from django.db import migrations, models import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ("helusers", "0001_add_ad_groups"), ] operations = [ migrations.CreateModel( name="OIDCBackChannelLogoutEvent", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created_at", models.DateTimeField(default=django.utils.timezone.now)), ("iss", models.CharField(db_index=True, max_length=4096)), ("sub", models.CharField(blank=True, db_index=True, max_length=4096)), ("sid", models.CharField(blank=True, db_index=True, max_length=4096)), ], options={ + "verbose_name": "OIDC back channel logout event", + "verbose_name_plural": "OIDC back channel logout events", "unique_together": {("iss", "sub", "sid")}, }, ), ]
156c049cc3965f969ee252dc5859cf0713bcbe27
grip/__init__.py
grip/__init__.py
__version__ = '1.2.0' from . import command from .server import default_filenames, serve from .renderer import render_content, render_page
__version__ = '1.2.0' from . import command from .renderer import render_content, render_page from .server import default_filenames, create_app, serve from .exporter import export
Add create_app and export to API.
Add create_app and export to API.
Python
mit
jbarreras/grip,jbarreras/grip,joeyespo/grip,mgoddard-pivotal/grip,ssundarraj/grip,ssundarraj/grip,mgoddard-pivotal/grip,joeyespo/grip
__version__ = '1.2.0' from . import command - from .server import default_filenames, serve from .renderer import render_content, render_page + from .server import default_filenames, create_app, serve + from .exporter import export
Add create_app and export to API.
## Code Before: __version__ = '1.2.0' from . import command from .server import default_filenames, serve from .renderer import render_content, render_page ## Instruction: Add create_app and export to API. ## Code After: __version__ = '1.2.0' from . import command from .renderer import render_content, render_page from .server import default_filenames, create_app, serve from .exporter import export
__version__ = '1.2.0' from . import command - from .server import default_filenames, serve from .renderer import render_content, render_page + from .server import default_filenames, create_app, serve + from .exporter import export
9201e9c433930da8fd0bfb13eadbc249469e4d84
fireplace/cards/tourney/mage.py
fireplace/cards/tourney/mage.py
from ..utils import * ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) )
from ..utils import * ## # Minions # Dalaran Aspirant class AT_006: inspire = Buff(SELF, "AT_006e") # Spellslinger class AT_007: play = Give(ALL_PLAYERS, RandomSpell()) # Rhonin class AT_009: deathrattle = Give(CONTROLLER, "EX1_277") * 3 ## # Spells # Flame Lance class AT_001: play = Hit(TARGET, 8) # Arcane Blast class AT_004: play = Hit(TARGET, 2) # Polymorph: Boar class AT_005: play = Morph(TARGET, "AT_005t") ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) )
Implement Mage cards for The Grand Tournament
Implement Mage cards for The Grand Tournament
Python
agpl-3.0
Meerkov/fireplace,amw2104/fireplace,liujimj/fireplace,Ragowit/fireplace,smallnamespace/fireplace,jleclanche/fireplace,Ragowit/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,NightKev/fireplace,liujimj/fireplace,Meerkov/fireplace,amw2104/fireplace,oftc-ftw/fireplace
from ..utils import * + + + ## + # Minions + + # Dalaran Aspirant + class AT_006: + inspire = Buff(SELF, "AT_006e") + + + # Spellslinger + class AT_007: + play = Give(ALL_PLAYERS, RandomSpell()) + + + # Rhonin + class AT_009: + deathrattle = Give(CONTROLLER, "EX1_277") * 3 + + + ## + # Spells + + # Flame Lance + class AT_001: + play = Hit(TARGET, 8) + + + # Arcane Blast + class AT_004: + play = Hit(TARGET, 2) + + + # Polymorph: Boar + class AT_005: + play = Morph(TARGET, "AT_005t") ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) )
Implement Mage cards for The Grand Tournament
## Code Before: from ..utils import * ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) ) ## Instruction: Implement Mage cards for The Grand Tournament ## Code After: from ..utils import * ## # Minions # Dalaran Aspirant class AT_006: inspire = Buff(SELF, "AT_006e") # Spellslinger class AT_007: play = Give(ALL_PLAYERS, RandomSpell()) # Rhonin class AT_009: deathrattle = Give(CONTROLLER, "EX1_277") * 3 ## # Spells # Flame Lance class AT_001: play = Hit(TARGET, 8) # Arcane Blast class AT_004: play = Hit(TARGET, 2) # Polymorph: Boar class AT_005: play = Morph(TARGET, "AT_005t") ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) )
from ..utils import * + + + ## + # Minions + + # Dalaran Aspirant + class AT_006: + inspire = Buff(SELF, "AT_006e") + + + # Spellslinger + class AT_007: + play = Give(ALL_PLAYERS, RandomSpell()) + + + # Rhonin + class AT_009: + deathrattle = Give(CONTROLLER, "EX1_277") * 3 + + + ## + # Spells + + # Flame Lance + class AT_001: + play = Hit(TARGET, 8) + + + # Arcane Blast + class AT_004: + play = Hit(TARGET, 2) + + + # Polymorph: Boar + class AT_005: + play = Morph(TARGET, "AT_005t") ## # Secrets # Effigy class AT_002: events = Death(FRIENDLY + MINION).on( lambda self, minion: Summon(self.controller, RandomMinion(cost=minion.cost)) )
1e6b0b6f53a4508c3e4218345b2ee57d48fbc8d1
flask_app.py
flask_app.py
from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return main.list_restaurants() @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return data @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
import json from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
Return str instead of dict.
Return str instead of dict.
Python
bsd-3-clause
talavis/kimenu
+ import json + from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): - return main.list_restaurants() + return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) - return data + return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
Return str instead of dict.
## Code Before: from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return main.list_restaurants() @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return data @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu() ## Instruction: Return str instead of dict. ## Code After: import json from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
+ import json + from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): - return main.list_restaurants() + return json.dumps(main.list_restaurants()) ? +++++++++++ + @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) - return data + return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
e7805528be294374b128dd6e40e3f8990b03cdac
main.py
main.py
from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': ui = ConsoleUI() subject_name = ask_for_subject(ui) module = import_subject_module(subject_name) module.execute(ui)
from argparse import ArgumentParser from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] def parse_args(): parser = ArgumentParser() parser.add_argument( '--gui', action='store_true', default=False, dest='use_gui', help='use graphical user interface') return parser.parse_args() def get_ui(use_gui): if use_gui: from bombdefusalmanual.ui.tk import TkGUI return TkGUI() else: return ConsoleUI() def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': args = parse_args() ui = get_ui(args.use_gui) subject_name = ask_for_subject(ui) module = import_subject_module(subject_name) module.execute(ui)
Allow to enable graphical UI via command line option.
Allow to enable graphical UI via command line option.
Python
mit
homeworkprod/better-bomb-defusal-manual,homeworkprod/better-bomb-defusal-manual
+ from argparse import ArgumentParser from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] + def parse_args(): + parser = ArgumentParser() + + parser.add_argument( + '--gui', + action='store_true', + default=False, + dest='use_gui', + help='use graphical user interface') + + return parser.parse_args() + + + def get_ui(use_gui): + if use_gui: + from bombdefusalmanual.ui.tk import TkGUI + return TkGUI() + else: + return ConsoleUI() + + def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': - ui = ConsoleUI() + args = parse_args() + ui = get_ui(args.use_gui) + subject_name = ask_for_subject(ui) + module = import_subject_module(subject_name) module.execute(ui)
Allow to enable graphical UI via command line option.
## Code Before: from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': ui = ConsoleUI() subject_name = ask_for_subject(ui) module = import_subject_module(subject_name) module.execute(ui) ## Instruction: Allow to enable graphical UI via command line option. ## Code After: from argparse import ArgumentParser from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] def parse_args(): parser = ArgumentParser() parser.add_argument( '--gui', action='store_true', default=False, dest='use_gui', help='use graphical user interface') return parser.parse_args() def get_ui(use_gui): if use_gui: from bombdefusalmanual.ui.tk import TkGUI return TkGUI() else: return ConsoleUI() def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': args = parse_args() ui = get_ui(args.use_gui) subject_name = ask_for_subject(ui) module = import_subject_module(subject_name) module.execute(ui)
+ from argparse import ArgumentParser from importlib import import_module from bombdefusalmanual.ui.console import ConsoleUI from bombdefusalmanual.ui.models import Answer ANSWERS = [ Answer('thebutton', 'The Button'), Answer('complicatedwires', 'Complicated Wires'), Answer('morsecode', 'Morse Code'), Answer('passwords', 'Passwords'), Answer('whosonfirst', 'Who\'s on First'), ] + def parse_args(): + parser = ArgumentParser() + + parser.add_argument( + '--gui', + action='store_true', + default=False, + dest='use_gui', + help='use graphical user interface') + + return parser.parse_args() + + + def get_ui(use_gui): + if use_gui: + from bombdefusalmanual.ui.tk import TkGUI + return TkGUI() + else: + return ConsoleUI() + + def ask_for_subject(ui): return ui.ask_for_choice('Which subject?', ANSWERS) def import_subject_module(name): return import_module('bombdefusalmanual.subjects.{}'.format(name)) if __name__ == '__main__': - ui = ConsoleUI() + args = parse_args() + ui = get_ui(args.use_gui) + subject_name = ask_for_subject(ui) + module = import_subject_module(subject_name) module.execute(ui)
b011ccf5c4ce5f93c7b02f938385432325012569
tt/core/tt.py
tt/core/tt.py
from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun # main classes from .matrix import matrix from .vector import vector, tensor # utility from . import utils
from .matrix import matrix from .vector import vector, tensor # tools from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun # utility from . import utils
Revert "Import order changed to break tools dependency"
Revert "Import order changed to break tools dependency" This reverts commit 3a75fd530b1ecb9e6466ac99532d06032ae3a049.
Python
mit
uranix/ttpy,uranix/ttpy
+ from .matrix import matrix + from .vector import vector, tensor + + + # tools from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun - # main classes - from .matrix import matrix - from .vector import vector, tensor - # utility from . import utils + + + + +
Revert "Import order changed to break tools dependency"
## Code Before: from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun # main classes from .matrix import matrix from .vector import vector, tensor # utility from . import utils ## Instruction: Revert "Import order changed to break tools dependency" ## Code After: from .matrix import matrix from .vector import vector, tensor # tools from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun # utility from . import utils
+ from .matrix import matrix + from .vector import vector, tensor + + + # tools from .tools import matvec, col, kron, dot, mkron, concatenate, sum, reshape from .tools import eye, diag, Toeplitz, qshift, qlaplace_dd, IpaS from .tools import ones, rand, linspace, sin, cos, delta, stepfun, unit, xfun - # main classes - from .matrix import matrix - from .vector import vector, tensor - # utility from . import utils + + + + +
94c48d9f61b8f7e462ce5f7013b29ce2399e4190
log4django/views/__init__.py
log4django/views/__init__.py
from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs
from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) | Q(request_id__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs
Add search by request_id field.
Add search by request_id field.
Python
bsd-3-clause
CodeScaleInc/log4django,CodeScaleInc/log4django,CodeScaleInc/log4django
from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) + | Q(request_id__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs
Add search by request_id field.
## Code Before: from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs ## Instruction: Add search by request_id field. ## Code After: from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) | Q(request_id__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs
from django.db.models import Q from ..models import LogRecord def _filter_records(request): getvars = request.GET logrecord_qs = LogRecord.objects.all().select_related('app') # Filtering by get params. if getvars.get('q'): q = getvars.get('q') logrecord_qs = logrecord_qs.filter( Q(app__name__icontains=q) | Q(message__icontains=q) | Q(fileName__icontains=q) | Q(loggerName__icontains=q) | Q(exception_message__icontains=q) + | Q(request_id__icontains=q) | Q(_extra__icontains=q) ) if getvars.get('app'): logrecord_qs = logrecord_qs.filter(app_id=getvars.get('app')) if getvars.get('logger'): logrecord_qs = logrecord_qs.filter(loggerName=getvars.get('logger')) if getvars.getlist('level'): logrecord_qs = logrecord_qs.filter(level__in=getvars.getlist('level')) if getvars.get('from'): logrecord_qs = logrecord_qs.filter(timestamp__gte=getvars.get('from')) if getvars.get('to'): logrecord_qs = logrecord_qs.filter(timestamp__lte=getvars.get('to')) return logrecord_qs
512ec31a3c022bc8a31d57bc51e4e6dac29dcf83
src/sentry/web/frontend/organization_api_key_settings.py
src/sentry/web/frontend/organization_api_key_settings.py
from __future__ import absolute_import from django import forms from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context)
from __future__ import absolute_import from django import forms from django.contrib import messages from django.http import HttpResponseRedirect from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) if form.is_valid(): key.save() messages.add_message( request, messages.SUCCESS, 'Your settings were saved.', ) return HttpResponseRedirect(request.path) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context)
Allow key settings to be saved
Allow key settings to be saved
Python
bsd-3-clause
hongliang5623/sentry,TedaLIEz/sentry,gg7/sentry,ifduyue/sentry,wujuguang/sentry,boneyao/sentry,fuziontech/sentry,fuziontech/sentry,pauloschilling/sentry,mvaled/sentry,JackDanger/sentry,imankulov/sentry,kevinlondon/sentry,kevinlondon/sentry,vperron/sentry,looker/sentry,Natim/sentry,looker/sentry,beeftornado/sentry,Kryz/sentry,llonchj/sentry,ngonzalvez/sentry,1tush/sentry,1tush/sentry,looker/sentry,mvaled/sentry,zenefits/sentry,drcapulet/sentry,hongliang5623/sentry,llonchj/sentry,JackDanger/sentry,ifduyue/sentry,korealerts1/sentry,mitsuhiko/sentry,zenefits/sentry,argonemyth/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,drcapulet/sentry,argonemyth/sentry,ewdurbin/sentry,ifduyue/sentry,ngonzalvez/sentry,felixbuenemann/sentry,jean/sentry,ifduyue/sentry,JTCunning/sentry,kevinastone/sentry,JamesMura/sentry,gencer/sentry,JamesMura/sentry,alexm92/sentry,TedaLIEz/sentry,kevinlondon/sentry,nicholasserra/sentry,BuildingLink/sentry,argonemyth/sentry,daevaorn/sentry,mvaled/sentry,songyi199111/sentry,BayanGroup/sentry,korealerts1/sentry,wong2/sentry,fotinakis/sentry,JTCunning/sentry,1tush/sentry,llonchj/sentry,kevinastone/sentry,Kryz/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,fotinakis/sentry,vperron/sentry,fotinakis/sentry,felixbuenemann/sentry,imankulov/sentry,mvaled/sentry,wong2/sentry,wong2/sentry,boneyao/sentry,pauloschilling/sentry,beeftornado/sentry,alexm92/sentry,daevaorn/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,daevaorn/sentry,zenefits/sentry,wujuguang/sentry,fuziontech/sentry,mvaled/sentry,hongliang5623/sentry,fotinakis/sentry,drcapulet/sentry,looker/sentry,JTCunning/sentry,jean/sentry,BayanGroup/sentry,vperron/sentry,jean/sentry,JackDanger/sentry,BayanGroup/sentry,boneyao/sentry,gencer/sentry,alexm92/sentry,looker/sentry,ifduyue/sentry,ewdurbin/sentry,daevaorn/sentry,gencer/sentry,JamesMura/sentry,wujuguang/sentry,imankulov/sentry,nicholasserra/sentry,Kryz/sentry,gg7/sentry,gencer
/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,TedaLIEz/sentry,pauloschilling/sentry,nicholasserra/sentry,ewdurbin/sentry,songyi199111/sentry,mitsuhiko/sentry,felixbuenemann/sentry,gg7/sentry,songyi199111/sentry,korealerts1/sentry,kevinastone/sentry,zenefits/sentry
from __future__ import absolute_import from django import forms + from django.contrib import messages + from django.http import HttpResponseRedirect from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) + if form.is_valid(): + key.save() + messages.add_message( + request, messages.SUCCESS, + 'Your settings were saved.', + ) + return HttpResponseRedirect(request.path) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context)
Allow key settings to be saved
## Code Before: from __future__ import absolute_import from django import forms from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context) ## Instruction: Allow key settings to be saved ## Code After: from __future__ import absolute_import from django import forms from django.contrib import messages from django.http import HttpResponseRedirect from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) if form.is_valid(): key.save() messages.add_message( request, messages.SUCCESS, 'Your settings were saved.', ) return 
HttpResponseRedirect(request.path) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context)
from __future__ import absolute_import from django import forms + from django.contrib import messages + from django.http import HttpResponseRedirect from django.utils.translation import ugettext_lazy as _ from sentry.models import ApiKey, OrganizationMemberType from sentry.web.forms.fields import OriginsField from sentry.web.frontend.base import OrganizationView class ApiKeyForm(forms.ModelForm): allowed_origins = OriginsField(label=_('Allowed Domains'), required=False, help_text=_('Separate multiple entries with a newline.')) class Meta: model = ApiKey fields = ('label', 'scopes', 'allowed_origins') class OrganizationApiKeySettingsView(OrganizationView): required_access = OrganizationMemberType.ADMIN def handle(self, request, organization, key_id): key = ApiKey.objects.get(organization=organization, id=key_id) form = ApiKeyForm(request.POST or None, instance=key) + if form.is_valid(): + key.save() + messages.add_message( + request, messages.SUCCESS, + 'Your settings were saved.', + ) + return HttpResponseRedirect(request.path) context = { 'key': key, 'form': form, } return self.respond('sentry/organization-api-key-settings.html', context)
93e46310b8ea9e61dbabf02bd3dd4b6b6748dd6e
erpnext/accounts/doctype/bank/bank_dashboard.py
erpnext/accounts/doctype/bank/bank_dashboard.py
from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', 'non_standard_fieldnames': { 'Paymnet Order': 'company_bank' }, 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] }, { 'items': ['Payment Order'] } ] }
from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] } ] }
Remove payment order from bank dashboard
fix: Remove payment order from bank dashboard
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', - 'non_standard_fieldnames': { - 'Paymnet Order': 'company_bank' - }, 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] - }, - { - 'items': ['Payment Order'] } ] }
Remove payment order from bank dashboard
## Code Before: from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', 'non_standard_fieldnames': { 'Paymnet Order': 'company_bank' }, 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] }, { 'items': ['Payment Order'] } ] } ## Instruction: Remove payment order from bank dashboard ## Code After: from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] } ] }
from __future__ import unicode_literals from frappe import _ def get_data(): return { 'fieldname': 'bank', - 'non_standard_fieldnames': { - 'Paymnet Order': 'company_bank' - }, 'transactions': [ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] - }, - { - 'items': ['Payment Order'] } ] }
ae583132ade7370595d6d9d14dba2b720c5415d6
cinemair/favorites/serializers.py
cinemair/favorites/serializers.py
from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data
from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data def validate_user(self, value): """ Check that the user is the same as request.user. """ if "request" in self.context: current_user = self.context["request"].user if current_user != value: raise drf_serializers.ValidationError("User must be you.") return value
Validate user when favorite a show
Validate user when favorite a show
Python
mit
Cinemair/cinemair-server,Cinemair/cinemair-server
from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data + def validate_user(self, value): + """ + Check that the user is the same as request.user. + """ + if "request" in self.context: + current_user = self.context["request"].user + + if current_user != value: + raise drf_serializers.ValidationError("User must be you.") + return value +
Validate user when favorite a show
## Code Before: from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data ## Instruction: Validate user when favorite a show ## Code After: from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data def validate_user(self, value): """ Check that the user is the same as request.user. """ if "request" in self.context: current_user = self.context["request"].user if current_user != value: raise drf_serializers.ValidationError("User must be you.") return value
from rest_framework import serializers as drf_serializers from cinemair.common.api import serializers from cinemair.shows.serializers import ShowRelatedSerializer from . import models class FavoriteSerializer(serializers.ModelSerializer): show_info = drf_serializers.SerializerMethodField() class Meta: model = models.Favorite def get_show_info(self, obj): data = ShowRelatedSerializer(obj.show).data del data["id"] return data + + def validate_user(self, value): + """ + Check that the user is the same as request.user. + """ + if "request" in self.context: + current_user = self.context["request"].user + + if current_user != value: + raise drf_serializers.ValidationError("User must be you.") + return value
cfb0bda6096378de428a1460823626f3dc4c9059
spyder_terminal/__init__.py
spyder_terminal/__init__.py
"""Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS VERSION_INFO = (0, 2, 1) __version__ = '.'.join(map(str, VERSION_INFO))
"""Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS VERSION_INFO = (0, 3, 0, 'dev0') __version__ = '.'.join(map(str, VERSION_INFO))
Set package version info to 0.3.0.dev0
Set package version info to 0.3.0.dev0
Python
mit
spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
"""Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS - VERSION_INFO = (0, 2, 1) + VERSION_INFO = (0, 3, 0, 'dev0') __version__ = '.'.join(map(str, VERSION_INFO))
Set package version info to 0.3.0.dev0
## Code Before: """Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS VERSION_INFO = (0, 2, 1) __version__ = '.'.join(map(str, VERSION_INFO)) ## Instruction: Set package version info to 0.3.0.dev0 ## Code After: """Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS VERSION_INFO = (0, 3, 0, 'dev0') __version__ = '.'.join(map(str, VERSION_INFO))
"""Spyder Terminal Plugin.""" from .terminalplugin import TerminalPlugin as PLUGIN_CLASS PLUGIN_CLASS - VERSION_INFO = (0, 2, 1) ? ^ ^ + VERSION_INFO = (0, 3, 0, 'dev0') ? ^ ^^^^^^^^^ __version__ = '.'.join(map(str, VERSION_INFO))
3b79447e1027cc4965ab3272c34740b82d79c66c
tools/perf/benchmarks/start_with_url.py
tools/perf/benchmarks/start_with_url.py
from measurements import startup import page_sets from telemetry import benchmark @benchmark.Disabled class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10}
from measurements import startup import page_sets from telemetry import benchmark @benchmark.Enabled('android', 'has tabs') class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10}
Enable statup_with_url.cold benchmark on android.
Enable statup_with_url.cold benchmark on android. The benchmark works locally, and collects an important datapoint for our current optimization work. Review URL: https://codereview.chromium.org/508303004 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#298526}
Python
bsd-3-clause
axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,M4sse/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,Chilledheart/chromium,axinging/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Jonekee/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,Fireblend/chromium
-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,ltilve/chromium,Just-D/chromium-1,dednal/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-cro
sswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk
from measurements import startup import page_sets from telemetry import benchmark - @benchmark.Disabled + @benchmark.Enabled('android', 'has tabs') class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10}
Enable statup_with_url.cold benchmark on android.
## Code Before: from measurements import startup import page_sets from telemetry import benchmark @benchmark.Disabled class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10} ## Instruction: Enable statup_with_url.cold benchmark on android. ## Code After: from measurements import startup import page_sets from telemetry import benchmark @benchmark.Enabled('android', 'has tabs') class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10}
from measurements import startup import page_sets from telemetry import benchmark - @benchmark.Disabled + @benchmark.Enabled('android', 'has tabs') class StartWithUrlCold(benchmark.Benchmark): """Measure time to start Chrome cold with startup URLs""" tag = 'cold' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'cold': True, 'pageset_repeat': 5} @benchmark.Enabled('android', 'has tabs') class StartWithUrlWarm(benchmark.Benchmark): """Measure time to start Chrome warm with startup URLs""" tag = 'warm' test = startup.StartWithUrl page_set = page_sets.StartupPagesPageSet options = {'warm': True, 'pageset_repeat': 10}