{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'PDF TO Markdown' && linkText !== 'PDF TO Markdown' ) { link.textContent = 'PDF TO Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== 'Voice Cloning' ) { link.textContent = 'Voice Cloning'; link.href = 'https://vibevoice.info/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || 
linkText.match(/^s*Enterprises*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'PDF TO Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, 
code').forEach(element => { const text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { 
console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); ' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return main.list_restaurants()\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return data\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"new_contents":{"kind":"string","value":"import json\n\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import 
Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return json.dumps(main.list_restaurants())\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return json.dumps(data)\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"subject":{"kind":"string","value":"Return str instead of dict."},"message":{"kind":"string","value":"Return str instead of dict.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"talavis/kimenu"},"ndiff":{"kind":"string","value":"+ import json\n+ \n from flask import abort\n from flask import Flask\n from flask_caching import Cache\n \n import main\n \n \n app = Flask(__name__)\n cache = Cache(app, config={'CACHE_TYPE': 'simple'})\n \n \n @app.route('/')\n def display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n \n \n @app.route('/api/restaurants')\n @cache.cached(timeout=3600)\n def api_list_restaurants():\n- return main.list_restaurants()\n+ return json.dumps(main.list_restaurants())\n \n \n @app.route('/api/restaurant/')\n @cache.cached(timeout=3600)\n def api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n- return data\n+ return json.dumps(data)\n \n \n @app.route('/ki')\n @cache.cached(timeout=3600)\n def make_menu_ki():\n return main.gen_ki_menu()\n \n \n @app.route('/uu')\n @cache.cached(timeout=3600)\n def make_menu_uu():\n return main.gen_uu_menu()\n "},"instruction":{"kind":"string","value":"Return str instead of dict."},"content":{"kind":"string","value":"## Code Before:\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return main.list_restaurants()\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return data\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n\n## Instruction:\nReturn str instead of dict.\n## Code After:\nimport json\n\nfrom flask import abort\nfrom flask import Flask\nfrom flask_caching import Cache\n\nimport main\n\n\napp = Flask(__name__)\ncache = Cache(app, config={'CACHE_TYPE': 'simple'})\n\n\n@app.route('/')\ndef display_available():\n content = ('' +\n '' +\n 'Restaurant Menu Parser' +\n '' +\n '' +\n '

Campus Solna (KI)

' +\n '

Campus Uppsala (BMC)

' +\n '' +\n '')\n return content\n\n\n@app.route('/api/restaurants')\n@cache.cached(timeout=3600)\ndef api_list_restaurants():\n return json.dumps(main.list_restaurants())\n\n\n@app.route('/api/restaurant/')\n@cache.cached(timeout=3600)\ndef api_get_restaurant(name):\n data = main.get_restaurant(name)\n if not data:\n abort(404)\n return json.dumps(data)\n\n\n@app.route('/ki')\n@cache.cached(timeout=3600)\ndef make_menu_ki():\n return main.gen_ki_menu()\n\n\n@app.route('/uu')\n@cache.cached(timeout=3600)\ndef make_menu_uu():\n return main.gen_uu_menu()\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport json\n\nfrom flask import abort\n\n\n// ... modified code ... \n\n\ndef api_list_restaurants():\n return json.dumps(main.list_restaurants())\n\n\n\n ... \n\n\n abort(404)\n return json.dumps(data)\n\n\n\n// ... rest of the code ..."}}},{"rowIdx":19268,"cells":{"commit":{"kind":"string","value":"874ead2ed9de86eea20c4a67ce7b53cb2766c09e"},"old_file":{"kind":"string","value":"erpnext/patches/v5_0/link_warehouse_with_account.py"},"new_file":{"kind":"string","value":"erpnext/patches/v5_0/link_warehouse_with_account.py"},"old_contents":{"kind":"string","value":"\nfrom __future__ import unicode_literals\nimport frappe\n\ndef execute():\n\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")"},"new_contents":{"kind":"string","value":"\nfrom __future__ import unicode_literals\nimport frappe\n\ndef execute():\n\tif \"master_name\" in frappe.db.get_table_columns(\"Account\"):\t\n\t\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n\t\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")"},"subject":{"kind":"string","value":"Update warehouse as per master_name if master_name exists"},"message":{"kind":"string","value":"Update warehouse as per master_name if master_name 
exists\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"indictranstech/fbd_erpnext,gangadharkadam/saloon_erp_install,mbauskar/helpdesk-erpnext,gmarke/erpnext,Tejal011089/paypal_erpnext,Tejal011089/trufil-erpnext,treejames/erpnext,indictranstech/reciphergroup-erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,gangadharkadam/vlinkerp,hatwar/buyback-erpnext,shft117/SteckerApp,Drooids/erpnext,treejames/erpnext,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,gmarke/erpnext,shft117/SteckerApp,mbauskar/alec_frappe5_erpnext,indictranstech/reciphergroup-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/fbd_erpnext,Tejal011089/fbd_erpnext,sheafferusa/erpnext,mbauskar/alec_frappe5_erpnext,fuhongliang/erpnext,geekroot/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,Tejal011089/osmosis_erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/contributionerp,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,indictranstech/fbd_erpnext,SPKian/Testing2,hanselke/erpnext-1,sheafferusa/erpnext,hatwar/Das_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-erpnext,anandpdoshi/erpnext,hatwar/buyback-erpnext,Tejal011089/osmosis_erpnext,susuchina/ERPNEXT,gangadharkadam/vlinkerp,mbauskar/helpdesk-erpnext,indictranstech/tele-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,tmimori/erpnext,Aptitudetech/ERPNext,netfirms/erpnext,gangadharkadam/contributionerp,netfirms/erpnext,rohitwaghchaure/GenieManager-erpnext,ShashaQin/erpnext,pombredanne/erpnext,SPKian/Testing,hanselke/erpnext-1,hernad/erpnext,mbauskar/sapphire-erpnext,hernad/erpnext,mahabuber/erpnext,anandpdoshi/erpnext,Tejal011089/osmosis_erpnext,hanselke/erpnext-1,susuchina/ERPNEXT,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,MartinEnder/erpnext-de,Tejal011089/huntercamp_erpnext,ThiagoGarciaAlves/erpnext,shft117/SteckerApp,rohitwaghchaure/GenieManager-erpnext,S
PKian/Testing2,shitolepriya/test-erp,saurabh6790/test-erp,Drooids/erpnext,njmube/erpnext,pombredanne/erpnext,gsnbng/erpnext,mbauskar/omnitech-erpnext,gangadharkadam/v6_erp,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp_install,tmimori/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/paypal_erpnext,indictranstech/erpnext,njmube/erpnext,mbauskar/Das_Erpnext,ThiagoGarciaAlves/erpnext,hatwar/Das_erpnext,gangadhar-kadam/helpdesk-erpnext,hernad/erpnext,rohitwaghchaure/GenieManager-erpnext,Drooids/erpnext,indictranstech/reciphergroup-erpnext,shitolepriya/test-erp,fuhongliang/erpnext,dieface/erpnext,indictranstech/osmosis-erpnext,gangadharkadam/contributionerp,Tejal011089/huntercamp_erpnext,mahabuber/erpnext,mbauskar/Das_Erpnext,gmarke/erpnext,indictranstech/tele-erpnext,saurabh6790/test-erp,pombredanne/erpnext,Suninus/erpnext,ShashaQin/erpnext,sheafferusa/erpnext,treejames/erpnext,SPKian/Testing,fuhongliang/erpnext,indictranstech/fbd_erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,MartinEnder/erpnext-de,Suninus/erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/trufil-erpnext,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/erpnext,rohitwaghchaure/erpnext-receipher,mbauskar/helpdesk-erpnext,indictranstech/biggift-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,gmarke/erpnext,gsnbng/erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,geekroot/erpnext,susuchina/ERPNEXT,netfirms/erpnext,dieface/erpnext,SPKian/Testing,indictranstech/fbd_erpnext,treejames/erpnext,tmimori/erpnext,gangadharkadam/saloon_erp,indictranstech/tele-erpnext,hatwar/Das_erpnext,aruizramon/alec_erpnext,mbauskar/sapphire-erpnext,ThiagoGarciaAlves/erpnext,mbauskar/omnitech-erpnext,Tejal011089/osmosis_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/osmosis-erpnext,fuhongliang/erpnext,gangadharkadam/v6_erp,gangadharkadam/vlinkerp,aruizramon/alec_erpnext,indictranstech/osmosis-erpnext,S
PKian/Testing,sheafferusa/erpnext,gangadhar-kadam/helpdesk-erpnext,Tejal011089/trufil-erpnext,indictranstech/trufil-erpnext,anandpdoshi/erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/helpdesk-erpnext,netfirms/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,dieface/erpnext,aruizramon/alec_erpnext,shft117/SteckerApp,sagar30051991/ozsmart-erp,gangadharkadam/vlinkerp,saurabh6790/test-erp,Drooids/erpnext,indictranstech/biggift-erpnext,SPKian/Testing2,Suninus/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,Tejal011089/fbd_erpnext,ShashaQin/erpnext,shitolepriya/test-erp,njmube/erpnext,MartinEnder/erpnext-de,Tejal011089/paypal_erpnext,aruizramon/alec_erpnext,tmimori/erpnext,Tejal011089/fbd_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,sagar30051991/ozsmart-erp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/erpnext-receipher,meisterkleister/erpnext,hanselke/erpnext-1,gangadharkadam/saloon_erp,sagar30051991/ozsmart-erp,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,geekroot/erpnext,njmube/erpnext,meisterkleister/erpnext,hatwar/Das_erpnext,hernad/erpnext,indictranstech/erpnext,gsnbng/erpnext,gangadharkadam/contributionerp,indictranstech/biggift-erpnext,meisterkleister/erpnext,gangadharkadam/v6_erp,shitolepriya/test-erp,geekroot/erpnext,Tejal011089/paypal_erpnext,dieface/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher"},"ndiff":{"kind":"string","value":" \n from __future__ import unicode_literals\n import frappe\n \n def execute():\n+ \tif \"master_name\" in frappe.db.get_table_columns(\"Account\"):\t\n- \tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n+ \t\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n- \t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') 
!= ''\"\"\")\n+ \t\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")"},"instruction":{"kind":"string","value":"Update warehouse as per master_name if master_name exists"},"content":{"kind":"string","value":"## Code Before:\n\nfrom __future__ import unicode_literals\nimport frappe\n\ndef execute():\n\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")\n## Instruction:\nUpdate warehouse as per master_name if master_name exists\n## Code After:\n\nfrom __future__ import unicode_literals\nimport frappe\n\ndef execute():\n\tif \"master_name\" in frappe.db.get_table_columns(\"Account\"):\t\n\t\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n\t\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\ndef execute():\n\tif \"master_name\" in frappe.db.get_table_columns(\"Account\"):\t\n\t\tfrappe.db.sql(\"\"\"update tabAccount set warehouse=master_name\n\t\t\twhere ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''\"\"\")\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":19269,"cells":{"commit":{"kind":"string","value":"c8a7a53f09f72d9dbe44b1bcb5b85c8ee5ba2c2c"},"old_file":{"kind":"string","value":"services/migrations/0012_unit_data_source.py"},"new_file":{"kind":"string","value":"services/migrations/0012_unit_data_source.py"},"old_contents":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('services', '0011_unit_extensions'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='unit',\n name='data_source',\n field=models.CharField(null=True, max_length=20),\n ),\n ]\n"},"new_contents":{"kind":"string","value":"from __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('services', '0011_unit_extensions'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='unit',\n name='data_source',\n field=models.CharField(null=True, max_length=20, default='tprek'),\n preserve_default=False\n ),\n ]\n"},"subject":{"kind":"string","value":"Add default to data_source migration."},"message":{"kind":"string","value":"Add default to data_source migration.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"City-of-Helsinki/smbackend,City-of-Helsinki/smbackend"},"ndiff":{"kind":"string","value":" from __future__ import unicode_literals\n \n from django.db import migrations, models\n \n \n class Migration(migrations.Migration):\n \n dependencies = [\n ('services', '0011_unit_extensions'),\n ]\n \n operations = [\n migrations.AddField(\n model_name='unit',\n name='data_source',\n- field=models.CharField(null=True, max_length=20),\n+ field=models.CharField(null=True, max_length=20, default='tprek'),\n+ preserve_default=False\n ),\n ]\n "},"instruction":{"kind":"string","value":"Add default to data_source 
migration."},"content":{"kind":"string","value":"## Code Before:\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('services', '0011_unit_extensions'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='unit',\n name='data_source',\n field=models.CharField(null=True, max_length=20),\n ),\n ]\n\n## Instruction:\nAdd default to data_source migration.\n## Code After:\nfrom __future__ import unicode_literals\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('services', '0011_unit_extensions'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='unit',\n name='data_source',\n field=models.CharField(null=True, max_length=20, default='tprek'),\n preserve_default=False\n ),\n ]\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n name='data_source',\n field=models.CharField(null=True, max_length=20, default='tprek'),\n preserve_default=False\n ),\n\n\n ... 
"}}},{"rowIdx":19270,"cells":{"commit":{"kind":"string","value":"9185d882dc5fc7131b90d3b93dff8b6603538a3d"},"old_file":{"kind":"string","value":"app/cogs/twitch_emotes.py"},"new_file":{"kind":"string","value":"app/cogs/twitch_emotes.py"},"old_contents":{"kind":"string","value":"from io import BytesIO\nimport requests\n\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\n\nTWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n\n\nclass TwitchEmotes:\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n r = requests.get(TWITCH_EMOTES_API)\n emote_data = r.json()\n\n emote_template = emote_data['template']['small']\n emote_ids = {name: info['image_id'] for name, info in\n emote_data['emotes'].items()}\n emote_cache = {}\n\n @bot.listen('on_message')\n async def respond(message):\n if message.author == bot.user:\n return\n\n text = message.content\n\n if text in emote_ids:\n if text not in emote_cache:\n url = emote_template.replace('{image_id}',\n str(emote_ids[text]))\n\n emote_img = requests.get(url).content\n emote_cache[text] = emote_img\n\n data = BytesIO(emote_cache[text])\n filename = '%s.png' % text\n await bot.send_file(message.channel, data, filename=filename)\n\n\ndef setup(bot: Bot):\n bot.add_cog(TwitchEmotes(bot))\n"},"new_contents":{"kind":"string","value":"from io import BytesIO\nimport logging\nimport requests\n\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\n\nTWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass TwitchEmotes:\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n r = requests.get(TWITCH_EMOTES_API)\n emote_data = r.json()\n\n emote_template = emote_data['template']['small']\n emote_ids = {name: info['image_id'] for name, info in\n emote_data['emotes'].items()}\n emote_cache = {}\n logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))\n logger.info('Using template: %s' % 
emote_template)\n\n @bot.listen('on_message')\n async def respond(message):\n if message.author == bot.user:\n return\n\n text = message.content\n\n if text in emote_ids:\n if text not in emote_cache:\n url = emote_template.replace('{image_id}',\n str(emote_ids[text]))\n logger.info('Fetching emote %s from %s' % (text, url))\n\n emote_img = requests.get(url).content\n emote_cache[text] = emote_img\n\n data = BytesIO(emote_cache[text])\n filename = '%s.png' % text\n await bot.send_file(message.channel, data, filename=filename)\n\n\ndef setup(bot: Bot):\n bot.add_cog(TwitchEmotes(bot))\n"},"subject":{"kind":"string","value":"Add logging to Twitch emotes module"},"message":{"kind":"string","value":"Add logging to Twitch emotes module\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"andrewlin16/duckbot,andrewlin16/duckbot"},"ndiff":{"kind":"string","value":" from io import BytesIO\n+ import logging\n import requests\n \n from discord.ext import commands\n from discord.ext.commands import Bot\n \n TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n+ \n+ \n+ logger = logging.getLogger(__name__)\n \n \n class TwitchEmotes:\n \n def __init__(self, bot: Bot):\n self.bot = bot\n \n r = requests.get(TWITCH_EMOTES_API)\n emote_data = r.json()\n \n emote_template = emote_data['template']['small']\n emote_ids = {name: info['image_id'] for name, info in\n emote_data['emotes'].items()}\n emote_cache = {}\n+ logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))\n+ logger.info('Using template: %s' % emote_template)\n \n @bot.listen('on_message')\n async def respond(message):\n if message.author == bot.user:\n return\n \n text = message.content\n \n if text in emote_ids:\n if text not in emote_cache:\n url = emote_template.replace('{image_id}',\n str(emote_ids[text]))\n+ logger.info('Fetching emote %s from %s' % (text, url))\n \n emote_img = requests.get(url).content\n 
emote_cache[text] = emote_img\n \n data = BytesIO(emote_cache[text])\n filename = '%s.png' % text\n await bot.send_file(message.channel, data, filename=filename)\n \n \n def setup(bot: Bot):\n bot.add_cog(TwitchEmotes(bot))\n "},"instruction":{"kind":"string","value":"Add logging to Twitch emotes module"},"content":{"kind":"string","value":"## Code Before:\nfrom io import BytesIO\nimport requests\n\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\n\nTWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n\n\nclass TwitchEmotes:\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n r = requests.get(TWITCH_EMOTES_API)\n emote_data = r.json()\n\n emote_template = emote_data['template']['small']\n emote_ids = {name: info['image_id'] for name, info in\n emote_data['emotes'].items()}\n emote_cache = {}\n\n @bot.listen('on_message')\n async def respond(message):\n if message.author == bot.user:\n return\n\n text = message.content\n\n if text in emote_ids:\n if text not in emote_cache:\n url = emote_template.replace('{image_id}',\n str(emote_ids[text]))\n\n emote_img = requests.get(url).content\n emote_cache[text] = emote_img\n\n data = BytesIO(emote_cache[text])\n filename = '%s.png' % text\n await bot.send_file(message.channel, data, filename=filename)\n\n\ndef setup(bot: Bot):\n bot.add_cog(TwitchEmotes(bot))\n\n## Instruction:\nAdd logging to Twitch emotes module\n## Code After:\nfrom io import BytesIO\nimport logging\nimport requests\n\nfrom discord.ext import commands\nfrom discord.ext.commands import Bot\n\nTWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n\n\nlogger = logging.getLogger(__name__)\n\n\nclass TwitchEmotes:\n\n def __init__(self, bot: Bot):\n self.bot = bot\n\n r = requests.get(TWITCH_EMOTES_API)\n emote_data = r.json()\n\n emote_template = emote_data['template']['small']\n emote_ids = {name: info['image_id'] for name, info in\n emote_data['emotes'].items()}\n emote_cache = {}\n 
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))\n logger.info('Using template: %s' % emote_template)\n\n @bot.listen('on_message')\n async def respond(message):\n if message.author == bot.user:\n return\n\n text = message.content\n\n if text in emote_ids:\n if text not in emote_cache:\n url = emote_template.replace('{image_id}',\n str(emote_ids[text]))\n logger.info('Fetching emote %s from %s' % (text, url))\n\n emote_img = requests.get(url).content\n emote_cache[text] = emote_img\n\n data = BytesIO(emote_cache[text])\n filename = '%s.png' % text\n await bot.send_file(message.channel, data, filename=filename)\n\n\ndef setup(bot: Bot):\n bot.add_cog(TwitchEmotes(bot))\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom io import BytesIO\nimport logging\nimport requests\n\n\n# ... modified code ... \n\n\nTWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'\n\n\nlogger = logging.getLogger(__name__)\n\n\n\n ... \n\n\n emote_cache = {}\n logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))\n logger.info('Using template: %s' % emote_template)\n\n\n\n ... \n\n\n str(emote_ids[text]))\n logger.info('Fetching emote %s from %s' % (text, url))\n\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":19271,"cells":{"commit":{"kind":"string","value":"0983361e6fba5812416d8fb5b695f6b3034bc927"},"old_file":{"kind":"string","value":"registration/management/commands/cleanupregistration.py"},"new_file":{"kind":"string","value":"registration/management/commands/cleanupregistration.py"},"old_contents":{"kind":"string","value":"\nfrom django.core.management.base import NoArgsCommand\n\nfrom ...models import RegistrationProfile\n\n\nclass Command(NoArgsCommand):\n help = \"Delete expired user registrations from the database\"\n\n def handle_noargs(self, **options):\n RegistrationProfile.objects.delete_expired_users()\n"},"new_contents":{"kind":"string","value":"\nfrom django.core.management.base import BaseCommand\n\nfrom ...models import RegistrationProfile\n\n\nclass Command(BaseCommand):\n help = \"Delete expired user registrations from the database\"\n\n def handle(self, *args, **options):\n RegistrationProfile.objects.delete_expired_users()\n"},"subject":{"kind":"string","value":"Fix deprecated class NoArgsCommand class."},"message":{"kind":"string","value":"Fix deprecated class NoArgsCommand class.\n\nSolve the warning RemovedInDjango110Warning: NoArgsCommand class is deprecated and will be removed in Django 1.10. 
Use BaseCommand instead, which takes no arguments by default.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"sergafts/django-registration,timgraham/django-registration,sergafts/django-registration,pando85/django-registration,pando85/django-registration,allo-/django-registration,allo-/django-registration,timgraham/django-registration"},"ndiff":{"kind":"string","value":" \n- from django.core.management.base import NoArgsCommand\n+ from django.core.management.base import BaseCommand\n \n from ...models import RegistrationProfile\n \n \n- class Command(NoArgsCommand):\n+ class Command(BaseCommand):\n help = \"Delete expired user registrations from the database\"\n \n- def handle_noargs(self, **options):\n+ def handle(self, *args, **options):\n RegistrationProfile.objects.delete_expired_users()\n "},"instruction":{"kind":"string","value":"Fix deprecated class NoArgsCommand class."},"content":{"kind":"string","value":"## Code Before:\n\nfrom django.core.management.base import NoArgsCommand\n\nfrom ...models import RegistrationProfile\n\n\nclass Command(NoArgsCommand):\n help = \"Delete expired user registrations from the database\"\n\n def handle_noargs(self, **options):\n RegistrationProfile.objects.delete_expired_users()\n\n## Instruction:\nFix deprecated class NoArgsCommand class.\n## Code After:\n\nfrom django.core.management.base import BaseCommand\n\nfrom ...models import RegistrationProfile\n\n\nclass Command(BaseCommand):\n help = \"Delete expired user registrations from the database\"\n\n def handle(self, *args, **options):\n RegistrationProfile.objects.delete_expired_users()\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\nfrom django.core.management.base import BaseCommand\n\n\n\n ... \n\n\n\nclass Command(BaseCommand):\n help = \"Delete expired user registrations from the database\"\n\n\n ... 
\n\n\n\n def handle(self, *args, **options):\n RegistrationProfile.objects.delete_expired_users()\n\n\n ... "}}},{"rowIdx":19272,"cells":{"commit":{"kind":"string","value":"8cc88e1f6e09e91f2ffc5bbf43b58b2d129a12c9"},"old_file":{"kind":"string","value":"bnc.py"},"new_file":{"kind":"string","value":"bnc.py"},"old_contents":{"kind":"string","value":"import nltk.corpus.reader.bnc\n"},"new_contents":{"kind":"string","value":"import nltk.corpus.reader.bnc\nimport time\n\n\nstart_time = time.perf_counter()\nBNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/',\n fileids=r'aca/\\w*\\.xml', # r'aca/\\w*\\.xml', # r'[a-z]{3}/\\w*\\.xml')\n lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is\ntime_taken = time.perf_counter() - start_time\nprint('\\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\\n')\n"},"subject":{"kind":"string","value":"Load BNC into memory and time process."},"message":{"kind":"string","value":"Load BNC into memory and time process.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"albertomh/ug-dissertation"},"ndiff":{"kind":"string","value":" import nltk.corpus.reader.bnc\n+ import time\n \n+ \n+ start_time = time.perf_counter()\n+ BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/',\n+ fileids=r'aca/\\w*\\.xml', # r'aca/\\w*\\.xml', # r'[a-z]{3}/\\w*\\.xml')\n+ lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is\n+ time_taken = time.perf_counter() - start_time\n+ print('\\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. 
||\\n')\n+ "},"instruction":{"kind":"string","value":"Load BNC into memory and time process."},"content":{"kind":"string","value":"## Code Before:\nimport nltk.corpus.reader.bnc\n\n## Instruction:\nLoad BNC into memory and time process.\n## Code After:\nimport nltk.corpus.reader.bnc\nimport time\n\n\nstart_time = time.perf_counter()\nBNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/',\n fileids=r'aca/\\w*\\.xml', # r'aca/\\w*\\.xml', # r'[a-z]{3}/\\w*\\.xml')\n lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is\ntime_taken = time.perf_counter() - start_time\nprint('\\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\\n')\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport nltk.corpus.reader.bnc\nimport time\n\n\nstart_time = time.perf_counter()\nBNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/',\n fileids=r'aca/\\w*\\.xml', # r'aca/\\w*\\.xml', # r'[a-z]{3}/\\w*\\.xml')\n lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is\ntime_taken = time.perf_counter() - start_time\nprint('\\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\\n')\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":19273,"cells":{"commit":{"kind":"string","value":"b973a1686f269044e670704b56c07ca79336c29c"},"old_file":{"kind":"string","value":"mythril/laser/ethereum/strategy/basic.py"},"new_file":{"kind":"string","value":"mythril/laser/ethereum/strategy/basic.py"},"old_contents":{"kind":"string","value":"class DepthFirstSearchStrategy:\n\n def __init__(self, content, max_depth):\n self.content = content\n self.max_depth = max_depth\n\n def __iter__(self):\n return self\n\n def __next__(self):\n try:\n global_state = self.content.pop(0)\n if global_state.mstate.depth >= self.max_depth:\n return self.__next__()\n return global_state\n except IndexError:\n raise StopIteration()\n"},"new_contents":{"kind":"string","value":"\n\nclass DepthFirstSearchStrategy:\n \"\"\"\n Implements a depth first search strategy\n I.E. Follow one path to a leaf, and then continue to the next one\n \"\"\"\n def __init__(self, work_list, max_depth):\n self.work_list = work_list\n self.max_depth = max_depth\n\n def __iter__(self):\n return self\n\n def __next__(self):\n \"\"\" Picks the next state to execute \"\"\"\n try:\n # This strategies assumes that new states are appended at the end of the work_list\n # By taking the last element we effectively pick the \"newest\" states, which amounts to dfs\n global_state = self.work_list.pop()\n if global_state.mstate.depth >= self.max_depth:\n return self.__next__()\n return global_state\n except IndexError:\n raise StopIteration()\n\n"},"subject":{"kind":"string","value":"Add documentation and fix pop"},"message":{"kind":"string","value":"Add documentation and fix pop\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril"},"ndiff":{"kind":"string","value":"+ \n+ \n class DepthFirstSearchStrategy:\n- \n+ \"\"\"\n+ Implements a depth first search strategy\n+ I.E. 
Follow one path to a leaf, and then continue to the next one\n+ \"\"\"\n- def __init__(self, content, max_depth):\n+ def __init__(self, work_list, max_depth):\n- self.content = content\n+ self.work_list = work_list\n self.max_depth = max_depth\n \n def __iter__(self):\n return self\n \n def __next__(self):\n+ \"\"\" Picks the next state to execute \"\"\"\n try:\n+ # This strategies assumes that new states are appended at the end of the work_list\n+ # By taking the last element we effectively pick the \"newest\" states, which amounts to dfs\n- global_state = self.content.pop(0)\n+ global_state = self.work_list.pop()\n if global_state.mstate.depth >= self.max_depth:\n return self.__next__()\n return global_state\n except IndexError:\n raise StopIteration()\n \n+ "},"instruction":{"kind":"string","value":"Add documentation and fix pop"},"content":{"kind":"string","value":"## Code Before:\nclass DepthFirstSearchStrategy:\n\n def __init__(self, content, max_depth):\n self.content = content\n self.max_depth = max_depth\n\n def __iter__(self):\n return self\n\n def __next__(self):\n try:\n global_state = self.content.pop(0)\n if global_state.mstate.depth >= self.max_depth:\n return self.__next__()\n return global_state\n except IndexError:\n raise StopIteration()\n\n## Instruction:\nAdd documentation and fix pop\n## Code After:\n\n\nclass DepthFirstSearchStrategy:\n \"\"\"\n Implements a depth first search strategy\n I.E. 
Follow one path to a leaf, and then continue to the next one\n \"\"\"\n def __init__(self, work_list, max_depth):\n self.work_list = work_list\n self.max_depth = max_depth\n\n def __iter__(self):\n return self\n\n def __next__(self):\n \"\"\" Picks the next state to execute \"\"\"\n try:\n # This strategies assumes that new states are appended at the end of the work_list\n # By taking the last element we effectively pick the \"newest\" states, which amounts to dfs\n global_state = self.work_list.pop()\n if global_state.mstate.depth >= self.max_depth:\n return self.__next__()\n return global_state\n except IndexError:\n raise StopIteration()\n\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n\nclass DepthFirstSearchStrategy:\n \"\"\"\n Implements a depth first search strategy\n I.E. Follow one path to a leaf, and then continue to the next one\n \"\"\"\n def __init__(self, work_list, max_depth):\n self.work_list = work_list\n self.max_depth = max_depth\n\n\n// ... modified code ... \n\n\n def __next__(self):\n \"\"\" Picks the next state to execute \"\"\"\n try:\n # This strategies assumes that new states are appended at the end of the work_list\n # By taking the last element we effectively pick the \"newest\" states, which amounts to dfs\n global_state = self.work_list.pop()\n if global_state.mstate.depth >= self.max_depth:\n\n\n ... \n\n\n raise StopIteration()\n\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":19274,"cells":{"commit":{"kind":"string","value":"5da928fd9b08aeb0028b71535413159da18393b4"},"old_file":{"kind":"string","value":"comics/sets/forms.py"},"new_file":{"kind":"string","value":"comics/sets/forms.py"},"old_contents":{"kind":"string","value":"import datetime\n\nfrom django import forms\nfrom django.template.defaultfilters import slugify\n\nfrom comics.core.models import Comic\nfrom comics.sets.models import Set\n\nclass NewSetForm(forms.ModelForm):\n class Meta:\n model = Set\n fields = ('name',)\n\n def save(self, commit=True):\n set = super(NewSetForm, self).save(commit=False)\n set.name = slugify(set.name)\n set.last_modified = datetime.datetime.now()\n set.last_loaded = datetime.datetime.now()\n if commit:\n set.save()\n return set\n\nclass EditSetForm(forms.ModelForm):\n comics = forms.ModelMultipleChoiceField(\n Comic.objects.all(),\n required=False,\n widget=forms.CheckboxSelectMultiple)\n add_new_comics = forms.BooleanField(\n label='Automatically add new comics to the set', required=False)\n hide_empty_comics = forms.BooleanField(\n label='Hide comics without matching releases from view', required=False)\n\n class Meta:\n model = Set\n fields = ('comics', 'add_new_comics', 'hide_empty_comics')\n\n def save(self, commit=True):\n comics_set = super(EditSetForm, self).save(commit=False)\n comics_set.last_modified = datetime.datetime.now()\n if commit:\n comics_set.save()\n self.save_m2m()\n return comics_set\n"},"new_contents":{"kind":"string","value":"import datetime\n\nfrom django import forms\nfrom django.template.defaultfilters import slugify\n\nfrom comics.core.models import Comic\nfrom comics.sets.models import Set\n\nclass NewSetForm(forms.ModelForm):\n class Meta:\n model = Set\n fields = ('name',)\n\n def save(self, commit=True):\n set = super(NewSetForm, self).save(commit=False)\n set.name = slugify(set.name)\n set.last_modified = datetime.datetime.now()\n set.last_loaded = datetime.datetime.now()\n if 
commit:\n set.save()\n return set\n\nclass EditSetForm(forms.ModelForm):\n comics = forms.ModelMultipleChoiceField(\n Comic.objects.filter(active=True),\n required=False,\n widget=forms.CheckboxSelectMultiple)\n add_new_comics = forms.BooleanField(\n label='Automatically add new comics to the set', required=False)\n hide_empty_comics = forms.BooleanField(\n label='Hide comics without matching releases from view', required=False)\n\n class Meta:\n model = Set\n fields = ('comics', 'add_new_comics', 'hide_empty_comics')\n\n def save(self, commit=True):\n comics_set = super(EditSetForm, self).save(commit=False)\n comics_set.last_modified = datetime.datetime.now()\n if commit:\n comics_set.save()\n self.save_m2m()\n return comics_set\n"},"subject":{"kind":"string","value":"Exclude inactive comics from sets editing, effectively throwing them out of the set when saved"},"message":{"kind":"string","value":"Exclude inactive comics from sets editing, effectively throwing them out of the set when saved\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,datagutten/comics"},"ndiff":{"kind":"string","value":" import datetime\n \n from django import forms\n from django.template.defaultfilters import slugify\n \n from comics.core.models import Comic\n from comics.sets.models import Set\n \n class NewSetForm(forms.ModelForm):\n class Meta:\n model = Set\n fields = ('name',)\n \n def save(self, commit=True):\n set = super(NewSetForm, self).save(commit=False)\n set.name = slugify(set.name)\n set.last_modified = datetime.datetime.now()\n set.last_loaded = datetime.datetime.now()\n if commit:\n set.save()\n return set\n \n class EditSetForm(forms.ModelForm):\n comics = forms.ModelMultipleChoiceField(\n- Comic.objects.all(),\n+ Comic.objects.filter(active=True),\n 
required=False,\n widget=forms.CheckboxSelectMultiple)\n add_new_comics = forms.BooleanField(\n label='Automatically add new comics to the set', required=False)\n hide_empty_comics = forms.BooleanField(\n label='Hide comics without matching releases from view', required=False)\n \n class Meta:\n model = Set\n fields = ('comics', 'add_new_comics', 'hide_empty_comics')\n \n def save(self, commit=True):\n comics_set = super(EditSetForm, self).save(commit=False)\n comics_set.last_modified = datetime.datetime.now()\n if commit:\n comics_set.save()\n self.save_m2m()\n return comics_set\n "},"instruction":{"kind":"string","value":"Exclude inactive comics from sets editing, effectively throwing them out of the set when saved"},"content":{"kind":"string","value":"## Code Before:\nimport datetime\n\nfrom django import forms\nfrom django.template.defaultfilters import slugify\n\nfrom comics.core.models import Comic\nfrom comics.sets.models import Set\n\nclass NewSetForm(forms.ModelForm):\n class Meta:\n model = Set\n fields = ('name',)\n\n def save(self, commit=True):\n set = super(NewSetForm, self).save(commit=False)\n set.name = slugify(set.name)\n set.last_modified = datetime.datetime.now()\n set.last_loaded = datetime.datetime.now()\n if commit:\n set.save()\n return set\n\nclass EditSetForm(forms.ModelForm):\n comics = forms.ModelMultipleChoiceField(\n Comic.objects.all(),\n required=False,\n widget=forms.CheckboxSelectMultiple)\n add_new_comics = forms.BooleanField(\n label='Automatically add new comics to the set', required=False)\n hide_empty_comics = forms.BooleanField(\n label='Hide comics without matching releases from view', required=False)\n\n class Meta:\n model = Set\n fields = ('comics', 'add_new_comics', 'hide_empty_comics')\n\n def save(self, commit=True):\n comics_set = super(EditSetForm, self).save(commit=False)\n comics_set.last_modified = datetime.datetime.now()\n if commit:\n comics_set.save()\n self.save_m2m()\n return comics_set\n\n## 
Instruction:\nExclude inactive comics from sets editing, effectively throwing them out of the set when saved\n## Code After:\nimport datetime\n\nfrom django import forms\nfrom django.template.defaultfilters import slugify\n\nfrom comics.core.models import Comic\nfrom comics.sets.models import Set\n\nclass NewSetForm(forms.ModelForm):\n class Meta:\n model = Set\n fields = ('name',)\n\n def save(self, commit=True):\n set = super(NewSetForm, self).save(commit=False)\n set.name = slugify(set.name)\n set.last_modified = datetime.datetime.now()\n set.last_loaded = datetime.datetime.now()\n if commit:\n set.save()\n return set\n\nclass EditSetForm(forms.ModelForm):\n comics = forms.ModelMultipleChoiceField(\n Comic.objects.filter(active=True),\n required=False,\n widget=forms.CheckboxSelectMultiple)\n add_new_comics = forms.BooleanField(\n label='Automatically add new comics to the set', required=False)\n hide_empty_comics = forms.BooleanField(\n label='Hide comics without matching releases from view', required=False)\n\n class Meta:\n model = Set\n fields = ('comics', 'add_new_comics', 'hide_empty_comics')\n\n def save(self, commit=True):\n comics_set = super(EditSetForm, self).save(commit=False)\n comics_set.last_modified = datetime.datetime.now()\n if commit:\n comics_set.save()\n self.save_m2m()\n return comics_set\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n comics = forms.ModelMultipleChoiceField(\n Comic.objects.filter(active=True),\n required=False,\n\n\n ... 
"}}},{"rowIdx":19275,"cells":{"commit":{"kind":"string","value":"fe974197217eff350f1dc0bc5687c83066d6dd34"},"old_file":{"kind":"string","value":"kaggle_tools/features_engineering/dates_engineering.py"},"new_file":{"kind":"string","value":"kaggle_tools/features_engineering/dates_engineering.py"},"old_contents":{"kind":"string","value":"import pandas as pd\n\n\ndef date_features(input_df, datetime_column='tms_gmt'):\n \"\"\"\n Given a datetime column, extracts useful date information\n (minute, hour, dow...)\n \"\"\"\n df = input_df.copy()\n\n return (df.set_index(time_column)\n .assign(minute=lambda df: df.index.minute,\n hour=lambda df: df.index.hour,\n day=lambda df: df.index.day,\n dow=lambda df: df.index.dayofweek,\n month=lambda df: df.index.month,\n week=lambda df: df.index.week,\n woy=lambda df: df.index.weekofyear,\n year=lambda df: df.index.year))\n"},"new_contents":{"kind":"string","value":"import pandas as pd\nimport pytz\n\n\ndef date_features(input_df, datetime_column='tms_gmt'):\n \"\"\"\n Given a datetime column, extracts useful date information\n (minute, hour, dow...)\n \"\"\"\n df = input_df.copy()\n\n return (df.set_index(time_column)\n .assign(minute=lambda df: df.index.minute,\n hour=lambda df: df.index.hour,\n day=lambda df: df.index.day,\n dow=lambda df: df.index.dayofweek,\n month=lambda df: df.index.month,\n week=lambda df: df.index.week,\n woy=lambda df: df.index.weekofyear,\n year=lambda df: df.index.year))\n\n\ndef localize_datetime(input_df, timezone='Europe/Paris',\n datetime_column='tms_gmt'):\n \"\"\"\n Convert datetime column from UTC to another timezone.\n \"\"\"\n tmz = pytz.timezone(timezone)\n df = input_df.copy()\n return (df.set_index(datetime_column)\n .tz_localize(pytz.utc) #  UTC time\n .tz_convert(tmz)) # Timezone time\n"},"subject":{"kind":"string","value":"Add a datetime localization function"},"message":{"kind":"string","value":"Add a datetime localization 
function\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"yassineAlouini/kaggle-tools,yassineAlouini/kaggle-tools"},"ndiff":{"kind":"string","value":" import pandas as pd\n+ import pytz\n \n \n def date_features(input_df, datetime_column='tms_gmt'):\n \"\"\"\n Given a datetime column, extracts useful date information\n (minute, hour, dow...)\n \"\"\"\n df = input_df.copy()\n \n return (df.set_index(time_column)\n .assign(minute=lambda df: df.index.minute,\n hour=lambda df: df.index.hour,\n day=lambda df: df.index.day,\n dow=lambda df: df.index.dayofweek,\n month=lambda df: df.index.month,\n week=lambda df: df.index.week,\n woy=lambda df: df.index.weekofyear,\n year=lambda df: df.index.year))\n \n+ \n+ def localize_datetime(input_df, timezone='Europe/Paris',\n+ datetime_column='tms_gmt'):\n+ \"\"\"\n+ Convert datetime column from UTC to another timezone.\n+ \"\"\"\n+ tmz = pytz.timezone(timezone)\n+ df = input_df.copy()\n+ return (df.set_index(datetime_column)\n+ .tz_localize(pytz.utc) #  UTC time\n+ .tz_convert(tmz)) # Timezone time\n+ "},"instruction":{"kind":"string","value":"Add a datetime localization function"},"content":{"kind":"string","value":"## Code Before:\nimport pandas as pd\n\n\ndef date_features(input_df, datetime_column='tms_gmt'):\n \"\"\"\n Given a datetime column, extracts useful date information\n (minute, hour, dow...)\n \"\"\"\n df = input_df.copy()\n\n return (df.set_index(time_column)\n .assign(minute=lambda df: df.index.minute,\n hour=lambda df: df.index.hour,\n day=lambda df: df.index.day,\n dow=lambda df: df.index.dayofweek,\n month=lambda df: df.index.month,\n week=lambda df: df.index.week,\n woy=lambda df: df.index.weekofyear,\n year=lambda df: df.index.year))\n\n## Instruction:\nAdd a datetime localization function\n## Code After:\nimport pandas as pd\nimport pytz\n\n\ndef date_features(input_df, datetime_column='tms_gmt'):\n \"\"\"\n Given a datetime column, 
extracts useful date information\n (minute, hour, dow...)\n \"\"\"\n df = input_df.copy()\n\n return (df.set_index(time_column)\n .assign(minute=lambda df: df.index.minute,\n hour=lambda df: df.index.hour,\n day=lambda df: df.index.day,\n dow=lambda df: df.index.dayofweek,\n month=lambda df: df.index.month,\n week=lambda df: df.index.week,\n woy=lambda df: df.index.weekofyear,\n year=lambda df: df.index.year))\n\n\ndef localize_datetime(input_df, timezone='Europe/Paris',\n datetime_column='tms_gmt'):\n \"\"\"\n Convert datetime column from UTC to another timezone.\n \"\"\"\n tmz = pytz.timezone(timezone)\n df = input_df.copy()\n return (df.set_index(datetime_column)\n .tz_localize(pytz.utc) #  UTC time\n .tz_convert(tmz)) # Timezone time\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport pandas as pd\nimport pytz\n\n\n\n// ... modified code ... \n\n\n year=lambda df: df.index.year))\n\n\ndef localize_datetime(input_df, timezone='Europe/Paris',\n datetime_column='tms_gmt'):\n \"\"\"\n Convert datetime column from UTC to another timezone.\n \"\"\"\n tmz = pytz.timezone(timezone)\n df = input_df.copy()\n return (df.set_index(datetime_column)\n .tz_localize(pytz.utc) #  UTC time\n .tz_convert(tmz)) # Timezone time\n\n\n// ... rest of the code ..."}}},{"rowIdx":19276,"cells":{"commit":{"kind":"string","value":"013154d359570d591f9315b10c738616d9cddb49"},"old_file":{"kind":"string","value":"loqusdb/build_models/profile_variant.py"},"new_file":{"kind":"string","value":"loqusdb/build_models/profile_variant.py"},"old_contents":{"kind":"string","value":"import logging\nimport json\n\nfrom loqusdb.models import ProfileVariant\nfrom .variant import get_variant_id\n\nLOG = logging.getLogger(__name__)\n\ndef get_maf(variant):\n \"\"\"\n if ID CAF exists in INFO column, return the allele frequency for\n the alt allele. 
The CAF INFO tag from dbSNP is a Comma delimited list of\n allele frequencies based on 1000Genomes.\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n maf (float): Minor allele frequency \n\n \"\"\"\n\n if not variant.INFO.get('CAF'):\n return None\n maf_list = json.loads(variant.INFO.get('CAF'))\n return maf_list[1]\n\n\ndef build_profile_variant(variant):\n \"\"\"Returns a ProfileVariant object\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n variant (models.ProfileVariant)\n \"\"\"\n\n chrom = variant.CHROM\n if chrom.startswith(('chr', 'CHR', 'Chr')):\n chrom = chrom[3:]\n\n pos = int(variant.POS)\n\n variant_id = get_variant_id(variant)\n\n ref = variant.REF\n alt = variant.ALT[0]\n\n maf = get_maf(variant)\n\n profile_variant = ProfileVariant(\n variant_id=variant_id,\n chrom=chrom,\n pos=pos,\n ref=ref,\n alt=alt,\n maf=maf,\n id_column = variant.ID\n )\n\n return profile_variant\n"},"new_contents":{"kind":"string","value":"import logging\n\nfrom loqusdb.models import ProfileVariant\nfrom .variant import get_variant_id\n\nLOG = logging.getLogger(__name__)\n\ndef get_maf(variant):\n \"\"\"\n Gets the MAF (minor allele frequency) tag from the info field for the\n variant.\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n maf (float): Minor allele frequency\n\n \"\"\"\n\n return variant.INFO.get('MAF')\n\n\ndef build_profile_variant(variant):\n \"\"\"Returns a ProfileVariant object\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n variant (models.ProfileVariant)\n \"\"\"\n\n chrom = variant.CHROM\n if chrom.startswith(('chr', 'CHR', 'Chr')):\n chrom = chrom[3:]\n\n pos = int(variant.POS)\n\n variant_id = get_variant_id(variant)\n\n ref = variant.REF\n alt = variant.ALT[0]\n\n maf = get_maf(variant)\n\n profile_variant = ProfileVariant(\n variant_id=variant_id,\n chrom=chrom,\n pos=pos,\n ref=ref,\n alt=alt,\n maf=maf,\n id_column = variant.ID\n )\n\n return profile_variant\n"},"subject":{"kind":"string","value":"Change from CAF to MAF tag when looking 
for MAF in vcf file"},"message":{"kind":"string","value":"Change from CAF to MAF tag when looking for MAF in vcf file\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"moonso/loqusdb"},"ndiff":{"kind":"string","value":" import logging\n- import json\n \n from loqusdb.models import ProfileVariant\n from .variant import get_variant_id\n \n LOG = logging.getLogger(__name__)\n \n def get_maf(variant):\n \"\"\"\n+ Gets the MAF (minor allele frequency) tag from the info field for the\n+ variant.\n- if ID CAF exists in INFO column, return the allele frequency for\n- the alt allele. The CAF INFO tag from dbSNP is a Comma delimited list of\n- allele frequencies based on 1000Genomes.\n \n Args:\n variant (cyvcf2.Variant)\n \n Returns:\n- maf (float): Minor allele frequency \n+ maf (float): Minor allele frequency\n \n \"\"\"\n \n- if not variant.INFO.get('CAF'):\n+ return variant.INFO.get('MAF')\n- return None\n- maf_list = json.loads(variant.INFO.get('CAF'))\n- return maf_list[1]\n \n \n def build_profile_variant(variant):\n \"\"\"Returns a ProfileVariant object\n \n Args:\n variant (cyvcf2.Variant)\n \n Returns:\n variant (models.ProfileVariant)\n \"\"\"\n \n chrom = variant.CHROM\n if chrom.startswith(('chr', 'CHR', 'Chr')):\n chrom = chrom[3:]\n \n pos = int(variant.POS)\n \n variant_id = get_variant_id(variant)\n \n ref = variant.REF\n alt = variant.ALT[0]\n \n maf = get_maf(variant)\n \n profile_variant = ProfileVariant(\n variant_id=variant_id,\n chrom=chrom,\n pos=pos,\n ref=ref,\n alt=alt,\n maf=maf,\n id_column = variant.ID\n )\n \n return profile_variant\n "},"instruction":{"kind":"string","value":"Change from CAF to MAF tag when looking for MAF in vcf file"},"content":{"kind":"string","value":"## Code Before:\nimport logging\nimport json\n\nfrom loqusdb.models import ProfileVariant\nfrom .variant import get_variant_id\n\nLOG = logging.getLogger(__name__)\n\ndef get_maf(variant):\n \"\"\"\n if 
ID CAF exists in INFO column, return the allele frequency for\n the alt allele. The CAF INFO tag from dbSNP is a Comma delimited list of\n allele frequencies based on 1000Genomes.\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n maf (float): Minor allele frequency \n\n \"\"\"\n\n if not variant.INFO.get('CAF'):\n return None\n maf_list = json.loads(variant.INFO.get('CAF'))\n return maf_list[1]\n\n\ndef build_profile_variant(variant):\n \"\"\"Returns a ProfileVariant object\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n variant (models.ProfileVariant)\n \"\"\"\n\n chrom = variant.CHROM\n if chrom.startswith(('chr', 'CHR', 'Chr')):\n chrom = chrom[3:]\n\n pos = int(variant.POS)\n\n variant_id = get_variant_id(variant)\n\n ref = variant.REF\n alt = variant.ALT[0]\n\n maf = get_maf(variant)\n\n profile_variant = ProfileVariant(\n variant_id=variant_id,\n chrom=chrom,\n pos=pos,\n ref=ref,\n alt=alt,\n maf=maf,\n id_column = variant.ID\n )\n\n return profile_variant\n\n## Instruction:\nChange from CAF to MAF tag when looking for MAF in vcf file\n## Code After:\nimport logging\n\nfrom loqusdb.models import ProfileVariant\nfrom .variant import get_variant_id\n\nLOG = logging.getLogger(__name__)\n\ndef get_maf(variant):\n \"\"\"\n Gets the MAF (minor allele frequency) tag from the info field for the\n variant.\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n maf (float): Minor allele frequency\n\n \"\"\"\n\n return variant.INFO.get('MAF')\n\n\ndef build_profile_variant(variant):\n \"\"\"Returns a ProfileVariant object\n\n Args:\n variant (cyvcf2.Variant)\n\n Returns:\n variant (models.ProfileVariant)\n \"\"\"\n\n chrom = variant.CHROM\n if chrom.startswith(('chr', 'CHR', 'Chr')):\n chrom = chrom[3:]\n\n pos = int(variant.POS)\n\n variant_id = get_variant_id(variant)\n\n ref = variant.REF\n alt = variant.ALT[0]\n\n maf = get_maf(variant)\n\n profile_variant = ProfileVariant(\n variant_id=variant_id,\n chrom=chrom,\n pos=pos,\n ref=ref,\n alt=alt,\n maf=maf,\n 
id_column = variant.ID\n )\n\n return profile_variant\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport logging\n\n\n\n// ... modified code ... \n\n\n \"\"\"\n Gets the MAF (minor allele frequency) tag from the info field for the\n variant.\n\n\n\n ... \n\n\n Returns:\n maf (float): Minor allele frequency\n\n\n\n ... \n\n\n\n return variant.INFO.get('MAF')\n\n\n\n// ... rest of the code ..."}}},{"rowIdx":19277,"cells":{"commit":{"kind":"string","value":"5f1fa23dd8e0850a9f0e6a054ec6738e5a174ff7"},"old_file":{"kind":"string","value":"database/tables.py"},"new_file":{"kind":"string","value":"database/tables.py"},"old_contents":{"kind":"string","value":"\nfrom sqlalchemy import MetaData, Table, Column, Integer, String\n\nMETADATA = MetaData()\n\nTable(\"quote\", METADATA,\n Column(\"qid\", Integer, primary_key=True),\n Column(\"text\", String, nullable=False))\n"},"new_contents":{"kind":"string","value":"\nfrom sqlalchemy import MetaData, Table, Column, Integer, String\n\nMETADATA = MetaData()\n\nTable(\"quote\", METADATA,\n Column(\"qid\", Integer, primary_key=True),\n Column(\"text\", String, nullable=False))\n\nTable(\"moderator\", METADATA,\n Column(\"stream\", String, primary_key=True),\n Column(\"name\", String, primary_key=True))\n"},"subject":{"kind":"string","value":"Add a table for caching moderators"},"message":{"kind":"string","value":"Add a table for caching moderators\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"pyrige/pump19"},"ndiff":{"kind":"string","value":" \n from sqlalchemy import MetaData, Table, Column, Integer, String\n \n METADATA = MetaData()\n \n Table(\"quote\", METADATA,\n Column(\"qid\", Integer, primary_key=True),\n Column(\"text\", String, nullable=False))\n \n+ Table(\"moderator\", METADATA,\n+ Column(\"stream\", String, primary_key=True),\n+ Column(\"name\", String, primary_key=True))\n+ 
"},"instruction":{"kind":"string","value":"Add a table for caching moderators"},"content":{"kind":"string","value":"## Code Before:\n\nfrom sqlalchemy import MetaData, Table, Column, Integer, String\n\nMETADATA = MetaData()\n\nTable(\"quote\", METADATA,\n Column(\"qid\", Integer, primary_key=True),\n Column(\"text\", String, nullable=False))\n\n## Instruction:\nAdd a table for caching moderators\n## Code After:\n\nfrom sqlalchemy import MetaData, Table, Column, Integer, String\n\nMETADATA = MetaData()\n\nTable(\"quote\", METADATA,\n Column(\"qid\", Integer, primary_key=True),\n Column(\"text\", String, nullable=False))\n\nTable(\"moderator\", METADATA,\n Column(\"stream\", String, primary_key=True),\n Column(\"name\", String, primary_key=True))\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n Column(\"text\", String, nullable=False))\n\nTable(\"moderator\", METADATA,\n Column(\"stream\", String, primary_key=True),\n Column(\"name\", String, primary_key=True))\n\n\n ... "}}},{"rowIdx":19278,"cells":{"commit":{"kind":"string","value":"526b1028925a59957e805b29fc624dae318661ef"},"old_file":{"kind":"string","value":"finances/models.py"},"new_file":{"kind":"string","value":"finances/models.py"},"old_contents":{"kind":"string","value":"import os\nimport hashlib\nimport datetime\n\nimport peewee\n\n\ndatabase = peewee.Proxy()\n\n\nclass BaseModel(peewee.Model):\n class Meta:\n database = database\n\n\nclass User(BaseModel):\n id = peewee.IntegerField(primary_key=True)\n name = peewee.CharField(unique=True)\n password = peewee.CharField()\n salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))\n join_date = peewee.DateTimeField(default=datetime.datetime.now)\n\n class AuthError(Exception):\n pass\n\n class RegisterError(Exception):\n pass\n\n @classmethod\n def auth(cls, name, password):\n user = User.get(name=name)\n\n pass_with_salt = password + user.salt\n pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n\n if not 
pass_hash == user.password:\n raise cls.AuthError('Wrong password!')\n\n return user\n\n @classmethod\n def register(cls, name, password):\n try:\n User.get(name=name)\n raise cls.RegisterError('User with that name does exist')\n except User.DoesNotExist:\n pass\n\n user = User(name=name)\n pass_with_salt = password + user.salt\n user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n user.save()\n"},"new_contents":{"kind":"string","value":"import os\nimport hashlib\nimport datetime\n\nimport peewee\n\n\ndatabase = peewee.Proxy()\n\n\nclass BaseModel(peewee.Model):\n class Meta:\n database = database\n\n\nclass User(BaseModel):\n id = peewee.IntegerField(primary_key=True)\n name = peewee.CharField(unique=True)\n password = peewee.CharField()\n salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))\n join_date = peewee.DateTimeField(default=datetime.datetime.now)\n\n class AuthError(Exception):\n pass\n\n class RegisterError(Exception):\n pass\n\n @classmethod\n def auth(cls, name, password):\n user = User.get(name=name)\n\n pass_with_salt = password + user.salt\n pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n\n if not pass_hash == user.password:\n raise cls.AuthError('Wrong password!')\n\n return user\n\n @classmethod\n def register(cls, name, password):\n try:\n User.get(name=name)\n raise cls.RegisterError('User with that name does exist')\n except User.DoesNotExist:\n pass\n\n user = User(name=name)\n pass_with_salt = password + user.salt\n user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n user.save()\n\n def __repr__(self):\n return '' % self.username\n"},"subject":{"kind":"string","value":"Add __repr__ for User model"},"message":{"kind":"string","value":"Add __repr__ for User model\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Afonasev/YourFinances"},"ndiff":{"kind":"string","value":" import os\n import 
hashlib\n import datetime\n \n import peewee\n \n \n database = peewee.Proxy()\n \n \n class BaseModel(peewee.Model):\n class Meta:\n database = database\n \n \n class User(BaseModel):\n id = peewee.IntegerField(primary_key=True)\n name = peewee.CharField(unique=True)\n password = peewee.CharField()\n salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))\n join_date = peewee.DateTimeField(default=datetime.datetime.now)\n \n class AuthError(Exception):\n pass\n \n class RegisterError(Exception):\n pass\n \n @classmethod\n def auth(cls, name, password):\n user = User.get(name=name)\n \n pass_with_salt = password + user.salt\n pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n \n if not pass_hash == user.password:\n raise cls.AuthError('Wrong password!')\n \n return user\n \n @classmethod\n def register(cls, name, password):\n try:\n User.get(name=name)\n raise cls.RegisterError('User with that name does exist')\n except User.DoesNotExist:\n pass\n \n user = User(name=name)\n pass_with_salt = password + user.salt\n user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n user.save()\n \n+ def __repr__(self):\n+ return '' % self.username\n+ "},"instruction":{"kind":"string","value":"Add __repr__ for User model"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport hashlib\nimport datetime\n\nimport peewee\n\n\ndatabase = peewee.Proxy()\n\n\nclass BaseModel(peewee.Model):\n class Meta:\n database = database\n\n\nclass User(BaseModel):\n id = peewee.IntegerField(primary_key=True)\n name = peewee.CharField(unique=True)\n password = peewee.CharField()\n salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))\n join_date = peewee.DateTimeField(default=datetime.datetime.now)\n\n class AuthError(Exception):\n pass\n\n class RegisterError(Exception):\n pass\n\n @classmethod\n def auth(cls, name, password):\n user = User.get(name=name)\n\n pass_with_salt = password + 
user.salt\n pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n\n if not pass_hash == user.password:\n raise cls.AuthError('Wrong password!')\n\n return user\n\n @classmethod\n def register(cls, name, password):\n try:\n User.get(name=name)\n raise cls.RegisterError('User with that name does exist')\n except User.DoesNotExist:\n pass\n\n user = User(name=name)\n pass_with_salt = password + user.salt\n user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n user.save()\n\n## Instruction:\nAdd __repr__ for User model\n## Code After:\nimport os\nimport hashlib\nimport datetime\n\nimport peewee\n\n\ndatabase = peewee.Proxy()\n\n\nclass BaseModel(peewee.Model):\n class Meta:\n database = database\n\n\nclass User(BaseModel):\n id = peewee.IntegerField(primary_key=True)\n name = peewee.CharField(unique=True)\n password = peewee.CharField()\n salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace'))\n join_date = peewee.DateTimeField(default=datetime.datetime.now)\n\n class AuthError(Exception):\n pass\n\n class RegisterError(Exception):\n pass\n\n @classmethod\n def auth(cls, name, password):\n user = User.get(name=name)\n\n pass_with_salt = password + user.salt\n pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n\n if not pass_hash == user.password:\n raise cls.AuthError('Wrong password!')\n\n return user\n\n @classmethod\n def register(cls, name, password):\n try:\n User.get(name=name)\n raise cls.RegisterError('User with that name does exist')\n except User.DoesNotExist:\n pass\n\n user = User(name=name)\n pass_with_salt = password + user.salt\n user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest()\n user.save()\n\n def __repr__(self):\n return '' % self.username\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n user.save()\n\n def __repr__(self):\n return '' % self.username\n\n\n ... 
"}}},{"rowIdx":19279,"cells":{"commit":{"kind":"string","value":"d2b4ec50442a00df85ef525cc82aca971b72eb86"},"old_file":{"kind":"string","value":"erpnext/patches/v11_0/rename_field_max_days_allowed.py"},"new_file":{"kind":"string","value":"erpnext/patches/v11_0/rename_field_max_days_allowed.py"},"old_contents":{"kind":"string","value":"import frappe\nfrom frappe.model.utils.rename_field import rename_field\n\ndef execute():\n\tfrappe.reload_doc(\"hr\", \"doctype\", \"leave_type\")\n\tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n\trename_field(\"Leave Type\", \"max_days_allowed\", \"max_continuous_days_allowed\") \n"},"new_contents":{"kind":"string","value":"import frappe\n\ndef execute():\n\tfrappe.db.sql(\"\"\"\n\t\tUPDATE `tabLeave Type`\n\t\tSET max_days_allowed = '0'\n\t\tWHERE trim(coalesce(max_days_allowed, '')) = ''\n\t\"\"\")\n\tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n\n"},"subject":{"kind":"string","value":"Set null values to '0' before changing column type"},"message":{"kind":"string","value":"[fix] Set null values to '0' before changing column type\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext"},"ndiff":{"kind":"string","value":" import frappe\n- from frappe.model.utils.rename_field import rename_field\n \n def execute():\n- \tfrappe.reload_doc(\"hr\", \"doctype\", \"leave_type\")\n+ \tfrappe.db.sql(\"\"\"\n+ \t\tUPDATE `tabLeave Type`\n+ \t\tSET max_days_allowed = '0'\n+ \t\tWHERE trim(coalesce(max_days_allowed, '')) = ''\n+ \t\"\"\")\n \tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n- \trename_field(\"Leave Type\", \"max_days_allowed\", \"max_continuous_days_allowed\") \n \n+ "},"instruction":{"kind":"string","value":"Set null values to '0' before 
changing column type"},"content":{"kind":"string","value":"## Code Before:\nimport frappe\nfrom frappe.model.utils.rename_field import rename_field\n\ndef execute():\n\tfrappe.reload_doc(\"hr\", \"doctype\", \"leave_type\")\n\tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n\trename_field(\"Leave Type\", \"max_days_allowed\", \"max_continuous_days_allowed\") \n\n## Instruction:\nSet null values to '0' before changing column type\n## Code After:\nimport frappe\n\ndef execute():\n\tfrappe.db.sql(\"\"\"\n\t\tUPDATE `tabLeave Type`\n\t\tSET max_days_allowed = '0'\n\t\tWHERE trim(coalesce(max_days_allowed, '')) = ''\n\t\"\"\")\n\tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nimport frappe\n\n\n\n# ... modified code ... \n\n\ndef execute():\n\tfrappe.db.sql(\"\"\"\n\t\tUPDATE `tabLeave Type`\n\t\tSET max_days_allowed = '0'\n\t\tWHERE trim(coalesce(max_days_allowed, '')) = ''\n\t\"\"\")\n\tfrappe.db.sql_ddl(\"\"\"ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL\"\"\")\n\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":19280,"cells":{"commit":{"kind":"string","value":"6be3a40010b7256cb5b8fadfe4ef40b6c5691a06"},"old_file":{"kind":"string","value":"jungle/session.py"},"new_file":{"kind":"string","value":"jungle/session.py"},"old_contents":{"kind":"string","value":"import boto3\n\n\ndef create_session(profile_name):\n if not profile_name:\n return boto3\n else:\n return boto3.Session(profile_name=profile_name)\n"},"new_contents":{"kind":"string","value":"import sys\n\nimport boto3\nimport botocore\nimport click\n\n\ndef create_session(profile_name):\n if profile_name is None:\n return boto3\n else:\n try:\n session = boto3.Session(profile_name=profile_name)\n return session\n except botocore.exceptions.ProfileNotFound as e:\n click.echo(\"Invalid profile name: {0}\".format(profile_name, e), err=True)\n sys.exit(2)\n"},"subject":{"kind":"string","value":"Add error message when wrong AWS Profile Name is given"},"message":{"kind":"string","value":"Add error message when wrong AWS Profile Name is given\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"achiku/jungle"},"ndiff":{"kind":"string","value":"+ import sys\n+ \n import boto3\n+ import botocore\n+ import click\n \n \n def create_session(profile_name):\n- if not profile_name:\n+ if profile_name is None:\n return boto3\n else:\n+ try:\n- return boto3.Session(profile_name=profile_name)\n+ session = boto3.Session(profile_name=profile_name)\n+ return session\n+ except botocore.exceptions.ProfileNotFound as e:\n+ click.echo(\"Invalid profile name: {0}\".format(profile_name, e), err=True)\n+ sys.exit(2)\n "},"instruction":{"kind":"string","value":"Add error message when wrong AWS Profile Name is given"},"content":{"kind":"string","value":"## Code Before:\nimport boto3\n\n\ndef create_session(profile_name):\n if not profile_name:\n return boto3\n else:\n return boto3.Session(profile_name=profile_name)\n\n## Instruction:\nAdd error 
message when wrong AWS Profile Name is given\n## Code After:\nimport sys\n\nimport boto3\nimport botocore\nimport click\n\n\ndef create_session(profile_name):\n if profile_name is None:\n return boto3\n else:\n try:\n session = boto3.Session(profile_name=profile_name)\n return session\n except botocore.exceptions.ProfileNotFound as e:\n click.echo(\"Invalid profile name: {0}\".format(profile_name, e), err=True)\n sys.exit(2)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport sys\n\nimport boto3\nimport botocore\nimport click\n\n\n\n ... \n\n\ndef create_session(profile_name):\n if profile_name is None:\n return boto3\n\n\n ... \n\n\n else:\n try:\n session = boto3.Session(profile_name=profile_name)\n return session\n except botocore.exceptions.ProfileNotFound as e:\n click.echo(\"Invalid profile name: {0}\".format(profile_name, e), err=True)\n sys.exit(2)\n\n\n ... "}}},{"rowIdx":19281,"cells":{"commit":{"kind":"string","value":"dbe7bfdba6392cb2cc5c8d0e710682c2cb9c2bc5"},"old_file":{"kind":"string","value":"cellom2tif/filetypes.py"},"new_file":{"kind":"string","value":"cellom2tif/filetypes.py"},"old_contents":{"kind":"string","value":"def is_cellomics_image(fn):\n \"\"\"Determine whether a file is a Cellomics image.\n\n Parameters\n ----------\n fn : string\n The filename of the file in question.\n\n Returns\n -------\n is_cellom : bool\n True if the filename points to a Cellomics image.\n \"\"\"\n is_cellom = fn.endswith('.C01') or fn.endswith('.c01')\n return is_cellom\n\n\ndef is_cellomics_mask(fn):\n \"\"\"Determine whether a file is a Cellomics mask image.\n\n Parameters\n ----------\n fn : string\n The filename.\n\n Returns\n -------\n is_mask : bool\n True if the filename points to a Cellomics mask image.\n \"\"\"\n is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01')\n return is_mask\n\n\n"},"new_contents":{"kind":"string","value":"import os\n\n\ndef fn_has_ext(fn, ext, case_sensitive=False):\n \"\"\"\n Determine whether a file has a 
particular extension.\n\n Parameters\n ----------\n fn : string\n The filename of the query file.\n ext : string\n The extension being checked.\n case_sensitive : bool\n Whether or not to treat the extension as case sensitive.\n\n Returns\n -------\n file_has_ext : bool\n True if the filename has the specified extension.\n \"\"\"\n fn_ext = os.path.splitext(fn)[1][1:]\n if case_sensitive:\n file_has_ext = fn_ext == ext\n else:\n file_has_ext = fn_ext.lower() == ext.lower()\n return file_has_ext\n\n\ndef is_cellomics_image(fn):\n \"\"\"Determine whether a file is a Cellomics image.\n\n Parameters\n ----------\n fn : string\n The filename of the file in question.\n\n Returns\n -------\n is_cellom : bool\n True if the filename points to a Cellomics image.\n \"\"\"\n is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB')\n return is_cellom\n\n\ndef is_cellomics_mask(fn):\n \"\"\"Determine whether a file is a Cellomics mask image.\n\n Parameters\n ----------\n fn : string\n The filename.\n\n Returns\n -------\n is_mask : bool\n True if the filename points to a Cellomics mask image.\n \"\"\"\n is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01')\n return is_mask\n\n\n"},"subject":{"kind":"string","value":"Add DIB files to cellomics file filter"},"message":{"kind":"string","value":"Add DIB files to cellomics file filter\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"jni/cellom2tif"},"ndiff":{"kind":"string","value":"+ import os\n+ \n+ \n+ def fn_has_ext(fn, ext, case_sensitive=False):\n+ \"\"\"\n+ Determine whether a file has a particular extension.\n+ \n+ Parameters\n+ ----------\n+ fn : string\n+ The filename of the query file.\n+ ext : string\n+ The extension being checked.\n+ case_sensitive : bool\n+ Whether or not to treat the extension as case sensitive.\n+ \n+ Returns\n+ -------\n+ file_has_ext : bool\n+ True if the filename has the specified extension.\n+ \"\"\"\n+ fn_ext = 
os.path.splitext(fn)[1][1:]\n+ if case_sensitive:\n+ file_has_ext = fn_ext == ext\n+ else:\n+ file_has_ext = fn_ext.lower() == ext.lower()\n+ return file_has_ext\n+ \n+ \n def is_cellomics_image(fn):\n \"\"\"Determine whether a file is a Cellomics image.\n \n Parameters\n ----------\n fn : string\n The filename of the file in question.\n \n Returns\n -------\n is_cellom : bool\n True if the filename points to a Cellomics image.\n \"\"\"\n- is_cellom = fn.endswith('.C01') or fn.endswith('.c01')\n+ is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB')\n return is_cellom\n \n \n def is_cellomics_mask(fn):\n \"\"\"Determine whether a file is a Cellomics mask image.\n \n Parameters\n ----------\n fn : string\n The filename.\n \n Returns\n -------\n is_mask : bool\n True if the filename points to a Cellomics mask image.\n \"\"\"\n is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01')\n return is_mask\n \n \n "},"instruction":{"kind":"string","value":"Add DIB files to cellomics file filter"},"content":{"kind":"string","value":"## Code Before:\ndef is_cellomics_image(fn):\n \"\"\"Determine whether a file is a Cellomics image.\n\n Parameters\n ----------\n fn : string\n The filename of the file in question.\n\n Returns\n -------\n is_cellom : bool\n True if the filename points to a Cellomics image.\n \"\"\"\n is_cellom = fn.endswith('.C01') or fn.endswith('.c01')\n return is_cellom\n\n\ndef is_cellomics_mask(fn):\n \"\"\"Determine whether a file is a Cellomics mask image.\n\n Parameters\n ----------\n fn : string\n The filename.\n\n Returns\n -------\n is_mask : bool\n True if the filename points to a Cellomics mask image.\n \"\"\"\n is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01')\n return is_mask\n\n\n\n## Instruction:\nAdd DIB files to cellomics file filter\n## Code After:\nimport os\n\n\ndef fn_has_ext(fn, ext, case_sensitive=False):\n \"\"\"\n Determine whether a file has a particular extension.\n\n Parameters\n ----------\n fn : string\n The filename 
of the query file.\n ext : string\n The extension being checked.\n case_sensitive : bool\n Whether or not to treat the extension as case sensitive.\n\n Returns\n -------\n file_has_ext : bool\n True if the filename has the specified extension.\n \"\"\"\n fn_ext = os.path.splitext(fn)[1][1:]\n if case_sensitive:\n file_has_ext = fn_ext == ext\n else:\n file_has_ext = fn_ext.lower() == ext.lower()\n return file_has_ext\n\n\ndef is_cellomics_image(fn):\n \"\"\"Determine whether a file is a Cellomics image.\n\n Parameters\n ----------\n fn : string\n The filename of the file in question.\n\n Returns\n -------\n is_cellom : bool\n True if the filename points to a Cellomics image.\n \"\"\"\n is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB')\n return is_cellom\n\n\ndef is_cellomics_mask(fn):\n \"\"\"Determine whether a file is a Cellomics mask image.\n\n Parameters\n ----------\n fn : string\n The filename.\n\n Returns\n -------\n is_mask : bool\n True if the filename points to a Cellomics mask image.\n \"\"\"\n is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01')\n return is_mask\n\n\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport os\n\n\ndef fn_has_ext(fn, ext, case_sensitive=False):\n \"\"\"\n Determine whether a file has a particular extension.\n\n Parameters\n ----------\n fn : string\n The filename of the query file.\n ext : string\n The extension being checked.\n case_sensitive : bool\n Whether or not to treat the extension as case sensitive.\n\n Returns\n -------\n file_has_ext : bool\n True if the filename has the specified extension.\n \"\"\"\n fn_ext = os.path.splitext(fn)[1][1:]\n if case_sensitive:\n file_has_ext = fn_ext == ext\n else:\n file_has_ext = fn_ext.lower() == ext.lower()\n return file_has_ext\n\n\ndef is_cellomics_image(fn):\n\n\n ... \n\n\n \"\"\"\n is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB')\n return is_cellom\n\n\n ... 
"}}},{"rowIdx":19282,"cells":{"commit":{"kind":"string","value":"1ba3536e214e283f503db0a9bf0d1ac4aa64f771"},"old_file":{"kind":"string","value":"tcconfig/_tc_command_helper.py"},"new_file":{"kind":"string","value":"tcconfig/_tc_command_helper.py"},"old_contents":{"kind":"string","value":"\nfrom __future__ import absolute_import, unicode_literals\n\nimport errno\nimport sys\n\nimport subprocrunner as spr\n\nfrom ._common import find_bin_path\nfrom ._const import Tc, TcSubCommand\nfrom ._error import NetworkInterfaceNotFoundError\nfrom ._logger import logger\n\n\ndef check_tc_command_installation():\n try:\n spr.Which(\"tc\").verify()\n except spr.CommandNotFoundError as e:\n logger.error(\"{:s}: {}\".format(e.__class__.__name__, e))\n sys.exit(errno.ENOENT)\n\n\ndef get_tc_base_command(tc_subcommand):\n if tc_subcommand not in TcSubCommand:\n raise ValueError(\"the argument must be a TcSubCommand value\")\n\n return \"{:s} {:s}\".format(find_bin_path(\"tc\"), tc_subcommand.value)\n\n\ndef run_tc_show(subcommand, device):\n from ._network import verify_network_interface\n\n verify_network_interface(device)\n\n runner = spr.SubprocessRunner(\n \"{:s} show dev {:s}\".format(get_tc_base_command(subcommand), device))\n if runner.run() != 0 and runner.stderr.find(\"Cannot find device\") != -1:\n # reach here if the device does not exist at the system and netiface\n # not installed.\n raise NetworkInterfaceNotFoundError(device=device)\n\n return runner.stdout\n"},"new_contents":{"kind":"string","value":"\nfrom __future__ import absolute_import, unicode_literals\n\nimport errno\nimport sys\n\nimport subprocrunner as spr\n\nfrom ._common import find_bin_path\nfrom ._const import Tc, TcSubCommand\nfrom ._error import NetworkInterfaceNotFoundError\nfrom ._logger import logger\n\n\ndef check_tc_command_installation():\n if find_bin_path(\"tc\"):\n return\n\n logger.error(\"command not found: tc\")\n sys.exit(errno.ENOENT)\n\n\ndef get_tc_base_command(tc_subcommand):\n if 
tc_subcommand not in TcSubCommand:\n raise ValueError(\"the argument must be a TcSubCommand value\")\n\n return \"{:s} {:s}\".format(find_bin_path(\"tc\"), tc_subcommand.value)\n\n\ndef run_tc_show(subcommand, device):\n from ._network import verify_network_interface\n\n verify_network_interface(device)\n\n runner = spr.SubprocessRunner(\n \"{:s} show dev {:s}\".format(get_tc_base_command(subcommand), device))\n if runner.run() != 0 and runner.stderr.find(\"Cannot find device\") != -1:\n # reach here if the device does not exist at the system and netiface\n # not installed.\n raise NetworkInterfaceNotFoundError(device=device)\n\n return runner.stdout\n"},"subject":{"kind":"string","value":"Change command installation check process"},"message":{"kind":"string","value":"Change command installation check process\n\nTo properly check even if the user is not root.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"thombashi/tcconfig,thombashi/tcconfig"},"ndiff":{"kind":"string","value":" \n from __future__ import absolute_import, unicode_literals\n \n import errno\n import sys\n \n import subprocrunner as spr\n \n from ._common import find_bin_path\n from ._const import Tc, TcSubCommand\n from ._error import NetworkInterfaceNotFoundError\n from ._logger import logger\n \n \n def check_tc_command_installation():\n- try:\n- spr.Which(\"tc\").verify()\n- except spr.CommandNotFoundError as e:\n- logger.error(\"{:s}: {}\".format(e.__class__.__name__, e))\n+ if find_bin_path(\"tc\"):\n+ return\n+ \n+ logger.error(\"command not found: tc\")\n- sys.exit(errno.ENOENT)\n+ sys.exit(errno.ENOENT)\n \n \n def get_tc_base_command(tc_subcommand):\n if tc_subcommand not in TcSubCommand:\n raise ValueError(\"the argument must be a TcSubCommand value\")\n \n return \"{:s} {:s}\".format(find_bin_path(\"tc\"), tc_subcommand.value)\n \n \n def run_tc_show(subcommand, device):\n from ._network import 
verify_network_interface\n \n verify_network_interface(device)\n \n runner = spr.SubprocessRunner(\n \"{:s} show dev {:s}\".format(get_tc_base_command(subcommand), device))\n if runner.run() != 0 and runner.stderr.find(\"Cannot find device\") != -1:\n # reach here if the device does not exist at the system and netiface\n # not installed.\n raise NetworkInterfaceNotFoundError(device=device)\n \n return runner.stdout\n "},"instruction":{"kind":"string","value":"Change command installation check process"},"content":{"kind":"string","value":"## Code Before:\n\nfrom __future__ import absolute_import, unicode_literals\n\nimport errno\nimport sys\n\nimport subprocrunner as spr\n\nfrom ._common import find_bin_path\nfrom ._const import Tc, TcSubCommand\nfrom ._error import NetworkInterfaceNotFoundError\nfrom ._logger import logger\n\n\ndef check_tc_command_installation():\n try:\n spr.Which(\"tc\").verify()\n except spr.CommandNotFoundError as e:\n logger.error(\"{:s}: {}\".format(e.__class__.__name__, e))\n sys.exit(errno.ENOENT)\n\n\ndef get_tc_base_command(tc_subcommand):\n if tc_subcommand not in TcSubCommand:\n raise ValueError(\"the argument must be a TcSubCommand value\")\n\n return \"{:s} {:s}\".format(find_bin_path(\"tc\"), tc_subcommand.value)\n\n\ndef run_tc_show(subcommand, device):\n from ._network import verify_network_interface\n\n verify_network_interface(device)\n\n runner = spr.SubprocessRunner(\n \"{:s} show dev {:s}\".format(get_tc_base_command(subcommand), device))\n if runner.run() != 0 and runner.stderr.find(\"Cannot find device\") != -1:\n # reach here if the device does not exist at the system and netiface\n # not installed.\n raise NetworkInterfaceNotFoundError(device=device)\n\n return runner.stdout\n\n## Instruction:\nChange command installation check process\n## Code After:\n\nfrom __future__ import absolute_import, unicode_literals\n\nimport errno\nimport sys\n\nimport subprocrunner as spr\n\nfrom ._common import find_bin_path\nfrom ._const 
import Tc, TcSubCommand\nfrom ._error import NetworkInterfaceNotFoundError\nfrom ._logger import logger\n\n\ndef check_tc_command_installation():\n if find_bin_path(\"tc\"):\n return\n\n logger.error(\"command not found: tc\")\n sys.exit(errno.ENOENT)\n\n\ndef get_tc_base_command(tc_subcommand):\n if tc_subcommand not in TcSubCommand:\n raise ValueError(\"the argument must be a TcSubCommand value\")\n\n return \"{:s} {:s}\".format(find_bin_path(\"tc\"), tc_subcommand.value)\n\n\ndef run_tc_show(subcommand, device):\n from ._network import verify_network_interface\n\n verify_network_interface(device)\n\n runner = spr.SubprocessRunner(\n \"{:s} show dev {:s}\".format(get_tc_base_command(subcommand), device))\n if runner.run() != 0 and runner.stderr.find(\"Cannot find device\") != -1:\n # reach here if the device does not exist at the system and netiface\n # not installed.\n raise NetworkInterfaceNotFoundError(device=device)\n\n return runner.stdout\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\ndef check_tc_command_installation():\n if find_bin_path(\"tc\"):\n return\n\n logger.error(\"command not found: tc\")\n sys.exit(errno.ENOENT)\n\n\n\n ... 
"}}},{"rowIdx":19283,"cells":{"commit":{"kind":"string","value":"00ef4db967b00c5cef5c72d5266327bbd9db5909"},"old_file":{"kind":"string","value":"ibmcnx/test/loadFunction.py"},"new_file":{"kind":"string","value":"ibmcnx/test/loadFunction.py"},"old_contents":{"kind":"string","value":"\nimport sys\nfrom java.lang import String\nfrom java.util import HashSet\nfrom java.util import HashMap\nimport java\nimport lotusConnectionsCommonAdmin\n\nglobdict = globals()\n\ndef loadFilesService():\n global globdict\n exec open(\"filesAdmin.py\").read()\n"},"new_contents":{"kind":"string","value":"\nimport sys\nfrom java.lang import String\nfrom java.util import HashSet\nfrom java.util import HashMap\nimport java\nimport lotusConnectionsCommonAdmin\n\nglobdict = globals()\nlocdict = locals()\n\ndef loadFilesService():\n global globdict\n global locdict\n execfile(\"filesAdmin.py\",globdict,locdict)\n"},"subject":{"kind":"string","value":"Customize scripts to work with menu"},"message":{"kind":"string","value":"Customize scripts to work with menu\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"stoeps13/ibmcnx2,stoeps13/ibmcnx2"},"ndiff":{"kind":"string","value":" \n import sys\n from java.lang import String\n from java.util import HashSet\n from java.util import HashMap\n import java\n import lotusConnectionsCommonAdmin\n \n globdict = globals()\n+ locdict = locals()\n \n def loadFilesService():\n global globdict\n- exec open(\"filesAdmin.py\").read()\n+ global locdict\n+ execfile(\"filesAdmin.py\",globdict,locdict)\n "},"instruction":{"kind":"string","value":"Customize scripts to work with menu"},"content":{"kind":"string","value":"## Code Before:\n\nimport sys\nfrom java.lang import String\nfrom java.util import HashSet\nfrom java.util import HashMap\nimport java\nimport lotusConnectionsCommonAdmin\n\nglobdict = globals()\n\ndef loadFilesService():\n global globdict\n exec 
open(\"filesAdmin.py\").read()\n\n## Instruction:\nCustomize scripts to work with menu\n## Code After:\n\nimport sys\nfrom java.lang import String\nfrom java.util import HashSet\nfrom java.util import HashMap\nimport java\nimport lotusConnectionsCommonAdmin\n\nglobdict = globals()\nlocdict = locals()\n\ndef loadFilesService():\n global globdict\n global locdict\n execfile(\"filesAdmin.py\",globdict,locdict)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nglobdict = globals()\nlocdict = locals()\n\n\n\n ... \n\n\n global globdict\n global locdict\n execfile(\"filesAdmin.py\",globdict,locdict)\n\n\n ... "}}},{"rowIdx":19284,"cells":{"commit":{"kind":"string","value":"2a57e5c17115e9c89936e6667985af1a47bf3247"},"old_file":{"kind":"string","value":"raiden/utils/typing.py"},"new_file":{"kind":"string","value":"raiden/utils/typing.py"},"old_contents":{"kind":"string","value":"from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import\nfrom typing import NewType\n\nT_Address = bytes\nAddress = NewType('Address', bytes)\n\nT_BlockExpiration = int\nBlockExpiration = NewType('BlockExpiration', int)\n\nT_BlockNumber = int\nBlockNumber = NewType('BlockNumber', int)\n\nT_BlockTimeout = int\nBlockTimeout = NewType('BlockNumber', int)\n\nT_ChannelID = T_Address\nChannelID = NewType('ChannelID', Address)\n\nT_Keccak256 = bytes\nKeccak256 = NewType('Keccak256', bytes)\n\nT_Secret = bytes\nSecret = NewType('Secret', bytes)\n\nT_Signature = bytes\nSignature = NewType('Signature', bytes)\n\nT_TokenAmount = int\nTokenAmount = NewType('TokenAmount', int)\n"},"new_contents":{"kind":"string","value":"from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import\nfrom typing import NewType\n\nT_Address = bytes\nAddress = NewType('Address', bytes)\n\nT_BlockExpiration = int\nBlockExpiration = NewType('BlockExpiration', int)\n\nT_BlockNumber = int\nBlockNumber = NewType('BlockNumber', int)\n\nT_BlockTimeout = int\nBlockTimeout = 
NewType('BlockNumber', int)\n\nT_ChannelID = bytes\nChannelID = NewType('ChannelID', bytes)\n\nT_Keccak256 = bytes\nKeccak256 = NewType('Keccak256', bytes)\n\nT_Secret = bytes\nSecret = NewType('Secret', bytes)\n\nT_Signature = bytes\nSignature = NewType('Signature', bytes)\n\nT_TokenAmount = int\nTokenAmount = NewType('TokenAmount', int)\n"},"subject":{"kind":"string","value":"Fix an oversight in new type definitions"},"message":{"kind":"string","value":"Fix an oversight in new type definitions\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"hackaugusto/raiden,hackaugusto/raiden"},"ndiff":{"kind":"string","value":" from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import\n from typing import NewType\n \n T_Address = bytes\n Address = NewType('Address', bytes)\n \n T_BlockExpiration = int\n BlockExpiration = NewType('BlockExpiration', int)\n \n T_BlockNumber = int\n BlockNumber = NewType('BlockNumber', int)\n \n T_BlockTimeout = int\n BlockTimeout = NewType('BlockNumber', int)\n \n- T_ChannelID = T_Address\n+ T_ChannelID = bytes\n- ChannelID = NewType('ChannelID', Address)\n+ ChannelID = NewType('ChannelID', bytes)\n \n T_Keccak256 = bytes\n Keccak256 = NewType('Keccak256', bytes)\n \n T_Secret = bytes\n Secret = NewType('Secret', bytes)\n \n T_Signature = bytes\n Signature = NewType('Signature', bytes)\n \n T_TokenAmount = int\n TokenAmount = NewType('TokenAmount', int)\n "},"instruction":{"kind":"string","value":"Fix an oversight in new type definitions"},"content":{"kind":"string","value":"## Code Before:\nfrom typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import\nfrom typing import NewType\n\nT_Address = bytes\nAddress = NewType('Address', bytes)\n\nT_BlockExpiration = int\nBlockExpiration = NewType('BlockExpiration', int)\n\nT_BlockNumber = int\nBlockNumber = NewType('BlockNumber', int)\n\nT_BlockTimeout = int\nBlockTimeout = 
NewType('BlockNumber', int)\n\nT_ChannelID = T_Address\nChannelID = NewType('ChannelID', Address)\n\nT_Keccak256 = bytes\nKeccak256 = NewType('Keccak256', bytes)\n\nT_Secret = bytes\nSecret = NewType('Secret', bytes)\n\nT_Signature = bytes\nSignature = NewType('Signature', bytes)\n\nT_TokenAmount = int\nTokenAmount = NewType('TokenAmount', int)\n\n## Instruction:\nFix an oversight in new type definitions\n## Code After:\nfrom typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import\nfrom typing import NewType\n\nT_Address = bytes\nAddress = NewType('Address', bytes)\n\nT_BlockExpiration = int\nBlockExpiration = NewType('BlockExpiration', int)\n\nT_BlockNumber = int\nBlockNumber = NewType('BlockNumber', int)\n\nT_BlockTimeout = int\nBlockTimeout = NewType('BlockNumber', int)\n\nT_ChannelID = bytes\nChannelID = NewType('ChannelID', bytes)\n\nT_Keccak256 = bytes\nKeccak256 = NewType('Keccak256', bytes)\n\nT_Secret = bytes\nSecret = NewType('Secret', bytes)\n\nT_Signature = bytes\nSignature = NewType('Signature', bytes)\n\nT_TokenAmount = int\nTokenAmount = NewType('TokenAmount', int)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\nT_ChannelID = bytes\nChannelID = NewType('ChannelID', bytes)\n\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":19285,"cells":{"commit":{"kind":"string","value":"e5f662d9cebe4133705eca74a300c325d432ad04"},"old_file":{"kind":"string","value":"anvil/components/cinder_client.py"},"new_file":{"kind":"string","value":"anvil/components/cinder_client.py"},"old_contents":{"kind":"string","value":"\nfrom anvil import components as comp\n\n\nclass CinderClientUninstaller(comp.PythonUninstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonUninstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientInstaller(comp.PythonInstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonInstallComponent.__init__(self, *args, **kargs)\n\n def _filter_pip_requires_line(self, line):\n if line.lower().find('keystoneclient') != -1:\n return None\n if line.lower().find('novaclient') != -1:\n return None\n if line.lower().find('glanceclient') != -1:\n return None\n return line\n\n\nclass CinderClientRuntime(comp.EmptyRuntime):\n def __init__(self, *args, **kargs):\n comp.EmptyRuntime.__init__(self, *args, **kargs)\n"},"new_contents":{"kind":"string","value":"\nfrom anvil import components as comp\n\n\nclass CinderClientUninstaller(comp.PythonUninstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonUninstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientInstaller(comp.PythonInstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonInstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientRuntime(comp.EmptyRuntime):\n def __init__(self, *args, **kargs):\n comp.EmptyRuntime.__init__(self, *args, **kargs)\n"},"subject":{"kind":"string","value":"Remove destruction of pips/test requires entries that don't exist."},"message":{"kind":"string","value":"Remove destruction of pips/test requires entries that don't 
exist.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"stackforge/anvil,stackforge/anvil,mc2014/anvil,mc2014/anvil"},"ndiff":{"kind":"string","value":" \n from anvil import components as comp\n \n \n class CinderClientUninstaller(comp.PythonUninstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonUninstallComponent.__init__(self, *args, **kargs)\n \n \n class CinderClientInstaller(comp.PythonInstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonInstallComponent.__init__(self, *args, **kargs)\n \n- def _filter_pip_requires_line(self, line):\n- if line.lower().find('keystoneclient') != -1:\n- return None\n- if line.lower().find('novaclient') != -1:\n- return None\n- if line.lower().find('glanceclient') != -1:\n- return None\n- return line\n- \n \n class CinderClientRuntime(comp.EmptyRuntime):\n def __init__(self, *args, **kargs):\n comp.EmptyRuntime.__init__(self, *args, **kargs)\n "},"instruction":{"kind":"string","value":"Remove destruction of pips/test requires entries that don't exist."},"content":{"kind":"string","value":"## Code Before:\n\nfrom anvil import components as comp\n\n\nclass CinderClientUninstaller(comp.PythonUninstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonUninstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientInstaller(comp.PythonInstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonInstallComponent.__init__(self, *args, **kargs)\n\n def _filter_pip_requires_line(self, line):\n if line.lower().find('keystoneclient') != -1:\n return None\n if line.lower().find('novaclient') != -1:\n return None\n if line.lower().find('glanceclient') != -1:\n return None\n return line\n\n\nclass CinderClientRuntime(comp.EmptyRuntime):\n def __init__(self, *args, **kargs):\n comp.EmptyRuntime.__init__(self, *args, **kargs)\n\n## Instruction:\nRemove destruction of pips/test requires entries that don't 
exist.\n## Code After:\n\nfrom anvil import components as comp\n\n\nclass CinderClientUninstaller(comp.PythonUninstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonUninstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientInstaller(comp.PythonInstallComponent):\n def __init__(self, *args, **kargs):\n comp.PythonInstallComponent.__init__(self, *args, **kargs)\n\n\nclass CinderClientRuntime(comp.EmptyRuntime):\n def __init__(self, *args, **kargs):\n comp.EmptyRuntime.__init__(self, *args, **kargs)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n\n\n\n ... "}}},{"rowIdx":19286,"cells":{"commit":{"kind":"string","value":"69e6db7a4a28ff1f50bd4f12f550a2b65f05eb38"},"old_file":{"kind":"string","value":"utils/dusk/__init__.py"},"new_file":{"kind":"string","value":"utils/dusk/__init__.py"},"old_contents":{"kind":"string","value":"\nfrom .context import Context # NOQA\nfrom .command import * # NOQA\nfrom .command_holder import CommandHolder # NOQA\nfrom .constants import * # NOQA\n"},"new_contents":{"kind":"string","value":"\nfrom .context import Context # NOQA\nfrom .command import * # NOQA\nfrom .command_holder import CommandHolder # NOQA\nfrom .constants import * # NOQA\n\n__version__ = \"1.0.0\""},"subject":{"kind":"string","value":"Remove obsolete TODO and add version"},"message":{"kind":"string","value":"Remove obsolete TODO and add version\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"awau/Amethyst,HexadecimalPython/Xeili"},"ndiff":{"kind":"string","value":" \n from .context import Context # NOQA\n from .command import * # NOQA\n from .command_holder import CommandHolder # NOQA\n from .constants import * # NOQA\n \n+ __version__ = \"1.0.0\""},"instruction":{"kind":"string","value":"Remove obsolete TODO and add version"},"content":{"kind":"string","value":"## Code Before:\n\nfrom .context import Context # NOQA\nfrom .command import * # NOQA\nfrom 
.command_holder import CommandHolder # NOQA\nfrom .constants import * # NOQA\n\n## Instruction:\nRemove obsolete TODO and add version\n## Code After:\n\nfrom .context import Context # NOQA\nfrom .command import * # NOQA\nfrom .command_holder import CommandHolder # NOQA\nfrom .constants import * # NOQA\n\n__version__ = \"1.0.0\""},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nfrom .constants import * # NOQA\n\n__version__ = \"1.0.0\"\n\n\n// ... rest of the code ..."}}},{"rowIdx":19287,"cells":{"commit":{"kind":"string","value":"fd4dc4bdd32283b67577630c38624d3df705efd3"},"old_file":{"kind":"string","value":"mathphys/functions.py"},"new_file":{"kind":"string","value":"mathphys/functions.py"},"old_contents":{"kind":"string","value":"\"\"\"Useful functions.\"\"\"\n\nimport numpy as _np\n\n\ndef polyfit(x, y, monomials, algorithm='lstsq'):\n \"\"\"Implement Custom polyfit.\"\"\"\n X = _np.zeros((len(x), len(monomials)))\n N = _np.zeros((len(x), len(monomials)))\n for i in range(X.shape[1]):\n X[:, i] = x\n N[:, i] = monomials[i]\n XN = X ** N\n y_ = _np.zeros((len(y), 1))\n y_[:, 0] = y\n XNt = _np.transpose(XN)\n b = _np.dot(XNt, y_)\n X = _np.dot(XNt, XN)\n\n if algorithm is 'lstsq':\n r = _np.linalg.lstsq(X, b)\n coeffs = r[0][:, 0]\n else:\n r = _np.linalg.solve(X, b)\n coeffs = r[:, 0]\n\n # finds maximum diff and its base value\n y_fitted = _np.dot(XN, coeffs)\n y_diff = abs(y_fitted - y_[:, 0])\n max_error = max(y_diff)\n idx = [i for i, value in enumerate(y_diff) if value == max_error]\n base_value = y_[idx[0], 0]\n\n return (coeffs, (max_error, base_value))\n"},"new_contents":{"kind":"string","value":"\"\"\"Useful functions.\"\"\"\n\nimport numpy as _np\n\n\ndef polyfit(x, y, monomials):\n \"\"\"Implement Custom polyfit.\"\"\"\n coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials)\n\n # finds maximum diff and its base value\n y_fitted = _np.polynomial.polynomial.polyval(x, coef)\n y_diff = abs(y_fitted - y)\n idx = 
_np.argmax(y_diff)\n\n coeffs = coef[monomials]\n return (coeffs, (y_diff[idx], y[idx]))\n"},"subject":{"kind":"string","value":"Change implementaton of polyfit method."},"message":{"kind":"string","value":"API: Change implementaton of polyfit method.\n\nUse new numpy.polynomial.polynomial.polyfit instead of implementing leastsquares by hand. This method is supposed to be more robust to numerical errors.\nWith this change, the keyword argument algorithm was removed.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"lnls-fac/mathphys"},"ndiff":{"kind":"string","value":" \"\"\"Useful functions.\"\"\"\n \n import numpy as _np\n \n \n- def polyfit(x, y, monomials, algorithm='lstsq'):\n+ def polyfit(x, y, monomials):\n \"\"\"Implement Custom polyfit.\"\"\"\n+ coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials)\n- X = _np.zeros((len(x), len(monomials)))\n- N = _np.zeros((len(x), len(monomials)))\n- for i in range(X.shape[1]):\n- X[:, i] = x\n- N[:, i] = monomials[i]\n- XN = X ** N\n- y_ = _np.zeros((len(y), 1))\n- y_[:, 0] = y\n- XNt = _np.transpose(XN)\n- b = _np.dot(XNt, y_)\n- X = _np.dot(XNt, XN)\n- \n- if algorithm is 'lstsq':\n- r = _np.linalg.lstsq(X, b)\n- coeffs = r[0][:, 0]\n- else:\n- r = _np.linalg.solve(X, b)\n- coeffs = r[:, 0]\n \n # finds maximum diff and its base value\n- y_fitted = _np.dot(XN, coeffs)\n+ y_fitted = _np.polynomial.polynomial.polyval(x, coef)\n- y_diff = abs(y_fitted - y_[:, 0])\n+ y_diff = abs(y_fitted - y)\n+ idx = _np.argmax(y_diff)\n- max_error = max(y_diff)\n- idx = [i for i, value in enumerate(y_diff) if value == max_error]\n- base_value = y_[idx[0], 0]\n \n- return (coeffs, (max_error, base_value))\n+ coeffs = coef[monomials]\n+ return (coeffs, (y_diff[idx], y[idx]))\n "},"instruction":{"kind":"string","value":"Change implementaton of polyfit method."},"content":{"kind":"string","value":"## Code Before:\n\"\"\"Useful functions.\"\"\"\n\nimport numpy 
as _np\n\n\ndef polyfit(x, y, monomials, algorithm='lstsq'):\n \"\"\"Implement Custom polyfit.\"\"\"\n X = _np.zeros((len(x), len(monomials)))\n N = _np.zeros((len(x), len(monomials)))\n for i in range(X.shape[1]):\n X[:, i] = x\n N[:, i] = monomials[i]\n XN = X ** N\n y_ = _np.zeros((len(y), 1))\n y_[:, 0] = y\n XNt = _np.transpose(XN)\n b = _np.dot(XNt, y_)\n X = _np.dot(XNt, XN)\n\n if algorithm is 'lstsq':\n r = _np.linalg.lstsq(X, b)\n coeffs = r[0][:, 0]\n else:\n r = _np.linalg.solve(X, b)\n coeffs = r[:, 0]\n\n # finds maximum diff and its base value\n y_fitted = _np.dot(XN, coeffs)\n y_diff = abs(y_fitted - y_[:, 0])\n max_error = max(y_diff)\n idx = [i for i, value in enumerate(y_diff) if value == max_error]\n base_value = y_[idx[0], 0]\n\n return (coeffs, (max_error, base_value))\n\n## Instruction:\nChange implementaton of polyfit method.\n## Code After:\n\"\"\"Useful functions.\"\"\"\n\nimport numpy as _np\n\n\ndef polyfit(x, y, monomials):\n \"\"\"Implement Custom polyfit.\"\"\"\n coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials)\n\n # finds maximum diff and its base value\n y_fitted = _np.polynomial.polynomial.polyval(x, coef)\n y_diff = abs(y_fitted - y)\n idx = _np.argmax(y_diff)\n\n coeffs = coef[monomials]\n return (coeffs, (y_diff[idx], y[idx]))\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\ndef polyfit(x, y, monomials):\n \"\"\"Implement Custom polyfit.\"\"\"\n coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials)\n\n\n\n ... \n\n\n # finds maximum diff and its base value\n y_fitted = _np.polynomial.polynomial.polyval(x, coef)\n y_diff = abs(y_fitted - y)\n idx = _np.argmax(y_diff)\n\n coeffs = coef[monomials]\n return (coeffs, (y_diff[idx], y[idx]))\n\n\n ... 
"}}},{"rowIdx":19288,"cells":{"commit":{"kind":"string","value":"8a44705413d3a01e897d4a922e7c1383b60a2927"},"old_file":{"kind":"string","value":"plugins/VersionUpgrade/VersionUpgrade21to22/__init__.py"},"new_file":{"kind":"string","value":"plugins/VersionUpgrade/VersionUpgrade21to22/__init__.py"},"old_contents":{"kind":"string","value":"\nfrom . import VersionUpgrade21to22\n\nfrom UM.i18n import i18nCatalog\ncatalog = i18nCatalog(\"cura\")\n\ndef getMetaData():\n return {\n \"plugin\": {\n \"name\": catalog.i18nc(\"@label\", \"Version Upgrade 2.1 to 2.2\"),\n \"author\": \"Ultimaker\",\n \"version\": \"1.0\",\n \"description\": catalog.i18nc(\"@info:whatsthis\", \"Upgrades configurations from Cura 2.1 to Cura 2.2.\"),\n \"api\": 2\n },\n \"version_upgrade\": {\n \"profile\": {\n \"from\": 1,\n \"to\": 2\n },\n \"machine_instance\": {\n \"from\": 1,\n \"to\": 2\n }\n }\n }\n\ndef register(app):\n return { \"version_upgrade\": VersionUpgrade21to22.VersionUpgrade21to22() }\n"},"new_contents":{"kind":"string","value":"\nfrom . 
import VersionUpgrade21to22\n\nfrom UM.i18n import i18nCatalog\ncatalog = i18nCatalog(\"cura\")\n\ndef getMetaData():\n return {\n \"plugin\": {\n \"name\": catalog.i18nc(\"@label\", \"Version Upgrade 2.1 to 2.2\"),\n \"author\": \"Ultimaker\",\n \"version\": \"1.0\",\n \"description\": catalog.i18nc(\"@info:whatsthis\", \"Upgrades configurations from Cura 2.1 to Cura 2.2.\"),\n \"api\": 2\n },\n \"version_upgrade\": {\n # From To Upgrade function\n (\"profile\", 1): (\"instance_container\", 2, VersionUpgrade21to22.upgradeProfile),\n (\"machine_instance\", 1): (\"container_stack\", 2, VersionUpgrade21to22.upgradeMachineInstance),\n (\"preferences\", 1): (\"preferences\", 2, VersionUpgrade21to22.upgradePreferences)\n },\n \"sources\": {\n \"profile\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./profiles\"}\n },\n \"machine_instance\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./machine_instances\"}\n },\n \"preferences\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\".\"}\n }\n }\n }\n\ndef register(app):\n return { \"version_upgrade\": VersionUpgrade21to22.VersionUpgrade21to22() }\n"},"subject":{"kind":"string","value":"Update metadata with dynamic config types"},"message":{"kind":"string","value":"Update metadata with dynamic config types\n\nAfter settings rework, we decided to make the upgrade plug-ins define their own configuration types. This is basically the definition for these configuration types. Only the get_version function is not yet implemented.\n\nContributes to issue CURA-844.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,totalretribution/Cura,hmflash/Cura,Curahelper/Cura,senttech/Cura,fieldOfView/Cura,senttech/Cura,fieldOfView/Cura,totalretribution/Cura,ynotstartups/Wanhao"},"ndiff":{"kind":"string","value":" \n from . 
import VersionUpgrade21to22\n \n from UM.i18n import i18nCatalog\n catalog = i18nCatalog(\"cura\")\n \n def getMetaData():\n return {\n \"plugin\": {\n \"name\": catalog.i18nc(\"@label\", \"Version Upgrade 2.1 to 2.2\"),\n \"author\": \"Ultimaker\",\n \"version\": \"1.0\",\n \"description\": catalog.i18nc(\"@info:whatsthis\", \"Upgrades configurations from Cura 2.1 to Cura 2.2.\"),\n \"api\": 2\n },\n \"version_upgrade\": {\n+ # From To Upgrade function\n+ (\"profile\", 1): (\"instance_container\", 2, VersionUpgrade21to22.upgradeProfile),\n+ (\"machine_instance\", 1): (\"container_stack\", 2, VersionUpgrade21to22.upgradeMachineInstance),\n+ (\"preferences\", 1): (\"preferences\", 2, VersionUpgrade21to22.upgradePreferences)\n+ },\n+ \"sources\": {\n \"profile\": {\n- \"from\": 1,\n- \"to\": 2\n+ \"get_version\": VersionUpgrade21to22.getCfgVersion,\n+ \"location\": {\"./profiles\"}\n },\n \"machine_instance\": {\n- \"from\": 1,\n+ \"get_version\": VersionUpgrade21to22.getCfgVersion,\n+ \"location\": {\"./machine_instances\"}\n+ },\n+ \"preferences\": {\n+ \"get_version\": VersionUpgrade21to22.getCfgVersion,\n- \"to\": 2\n+ \"location\": {\".\"}\n }\n }\n }\n \n def register(app):\n return { \"version_upgrade\": VersionUpgrade21to22.VersionUpgrade21to22() }\n "},"instruction":{"kind":"string","value":"Update metadata with dynamic config types"},"content":{"kind":"string","value":"## Code Before:\n\nfrom . 
import VersionUpgrade21to22\n\nfrom UM.i18n import i18nCatalog\ncatalog = i18nCatalog(\"cura\")\n\ndef getMetaData():\n return {\n \"plugin\": {\n \"name\": catalog.i18nc(\"@label\", \"Version Upgrade 2.1 to 2.2\"),\n \"author\": \"Ultimaker\",\n \"version\": \"1.0\",\n \"description\": catalog.i18nc(\"@info:whatsthis\", \"Upgrades configurations from Cura 2.1 to Cura 2.2.\"),\n \"api\": 2\n },\n \"version_upgrade\": {\n \"profile\": {\n \"from\": 1,\n \"to\": 2\n },\n \"machine_instance\": {\n \"from\": 1,\n \"to\": 2\n }\n }\n }\n\ndef register(app):\n return { \"version_upgrade\": VersionUpgrade21to22.VersionUpgrade21to22() }\n\n## Instruction:\nUpdate metadata with dynamic config types\n## Code After:\n\nfrom . import VersionUpgrade21to22\n\nfrom UM.i18n import i18nCatalog\ncatalog = i18nCatalog(\"cura\")\n\ndef getMetaData():\n return {\n \"plugin\": {\n \"name\": catalog.i18nc(\"@label\", \"Version Upgrade 2.1 to 2.2\"),\n \"author\": \"Ultimaker\",\n \"version\": \"1.0\",\n \"description\": catalog.i18nc(\"@info:whatsthis\", \"Upgrades configurations from Cura 2.1 to Cura 2.2.\"),\n \"api\": 2\n },\n \"version_upgrade\": {\n # From To Upgrade function\n (\"profile\", 1): (\"instance_container\", 2, VersionUpgrade21to22.upgradeProfile),\n (\"machine_instance\", 1): (\"container_stack\", 2, VersionUpgrade21to22.upgradeMachineInstance),\n (\"preferences\", 1): (\"preferences\", 2, VersionUpgrade21to22.upgradePreferences)\n },\n \"sources\": {\n \"profile\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./profiles\"}\n },\n \"machine_instance\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./machine_instances\"}\n },\n \"preferences\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\".\"}\n }\n }\n }\n\ndef register(app):\n return { \"version_upgrade\": VersionUpgrade21to22.VersionUpgrade21to22() }\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n \"version_upgrade\": {\n # From To Upgrade function\n (\"profile\", 1): (\"instance_container\", 2, VersionUpgrade21to22.upgradeProfile),\n (\"machine_instance\", 1): (\"container_stack\", 2, VersionUpgrade21to22.upgradeMachineInstance),\n (\"preferences\", 1): (\"preferences\", 2, VersionUpgrade21to22.upgradePreferences)\n },\n \"sources\": {\n \"profile\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./profiles\"}\n },\n\n\n# ... modified code ... \n\n\n \"machine_instance\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\"./machine_instances\"}\n },\n \"preferences\": {\n \"get_version\": VersionUpgrade21to22.getCfgVersion,\n \"location\": {\".\"}\n }\n\n\n# ... rest of the code ..."}}},{"rowIdx":19289,"cells":{"commit":{"kind":"string","value":"77e9d92e040b60cc5e894a59ecfde0a91a8f1f8c"},"old_file":{"kind":"string","value":"coop_cms/apps/email_auth/forms.py"},"new_file":{"kind":"string","value":"coop_cms/apps/email_auth/forms.py"},"old_contents":{"kind":"string","value":"\nfrom django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext as _\n\nclass EmailAuthForm(forms.Form):\n email = forms.EmailField(required=True, label=_(u\"Email\"))\n password = forms.CharField(label=_(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n super(EmailAuthForm, self).__init__(*args, **kwargs)\n \n def _authenticate(self):\n email = self.cleaned_data.get('email')\n password = self.cleaned_data.get('password')\n \n error_messages = {\n 'invalid_login': _(\"Please enter a correct %(email)s and password. 
\"\n \"Note that both fields may be case-sensitive.\"),\n }\n \n if email and password:\n self.user_cache = authenticate(email=email, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(\n error_messages['invalid_login'],\n code='invalid_login',\n params={'email': _(u\"email\")},\n )\n \n def get_user(self):\n return self.user_cache\n \n def clean(self):\n self._authenticate()\n return self.cleaned_data"},"new_contents":{"kind":"string","value":"\nfrom django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext as _, ugettext_lazy as __\n\nclass EmailAuthForm(forms.Form):\n email = forms.EmailField(required=True, label=__(u\"Email\"))\n password = forms.CharField(label=__(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n super(EmailAuthForm, self).__init__(*args, **kwargs)\n \n def _authenticate(self):\n email = self.cleaned_data.get('email')\n password = self.cleaned_data.get('password')\n \n error_messages = {\n 'invalid_login': _(\"Please enter a correct %(email)s and password. 
\"\n \"Note that both fields may be case-sensitive.\"),\n }\n \n if email and password:\n self.user_cache = authenticate(email=email, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(\n error_messages['invalid_login'],\n code='invalid_login',\n params={'email': _(u\"email\")},\n )\n \n def get_user(self):\n return self.user_cache\n \n def clean(self):\n self._authenticate()\n return self.cleaned_data"},"subject":{"kind":"string","value":"Fix translation issue on EmailAuthForm"},"message":{"kind":"string","value":"Fix translation issue on EmailAuthForm\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"ljean/coop_cms,ljean/coop_cms,ljean/coop_cms"},"ndiff":{"kind":"string","value":" \n from django import forms\n from django.contrib.auth import authenticate\n- from django.utils.translation import ugettext as _\n+ from django.utils.translation import ugettext as _, ugettext_lazy as __\n \n class EmailAuthForm(forms.Form):\n- email = forms.EmailField(required=True, label=_(u\"Email\"))\n+ email = forms.EmailField(required=True, label=__(u\"Email\"))\n- password = forms.CharField(label=_(\"Password\"), widget=forms.PasswordInput)\n+ password = forms.CharField(label=__(\"Password\"), widget=forms.PasswordInput)\n \n def __init__(self, request=None, *args, **kwargs):\n super(EmailAuthForm, self).__init__(*args, **kwargs)\n \n def _authenticate(self):\n email = self.cleaned_data.get('email')\n password = self.cleaned_data.get('password')\n \n error_messages = {\n 'invalid_login': _(\"Please enter a correct %(email)s and password. 
\"\n \"Note that both fields may be case-sensitive.\"),\n }\n \n if email and password:\n self.user_cache = authenticate(email=email, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(\n error_messages['invalid_login'],\n code='invalid_login',\n params={'email': _(u\"email\")},\n )\n \n def get_user(self):\n return self.user_cache\n \n def clean(self):\n self._authenticate()\n return self.cleaned_data"},"instruction":{"kind":"string","value":"Fix translation issue on EmailAuthForm"},"content":{"kind":"string","value":"## Code Before:\n\nfrom django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext as _\n\nclass EmailAuthForm(forms.Form):\n email = forms.EmailField(required=True, label=_(u\"Email\"))\n password = forms.CharField(label=_(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n super(EmailAuthForm, self).__init__(*args, **kwargs)\n \n def _authenticate(self):\n email = self.cleaned_data.get('email')\n password = self.cleaned_data.get('password')\n \n error_messages = {\n 'invalid_login': _(\"Please enter a correct %(email)s and password. 
\"\n \"Note that both fields may be case-sensitive.\"),\n }\n \n if email and password:\n self.user_cache = authenticate(email=email, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(\n error_messages['invalid_login'],\n code='invalid_login',\n params={'email': _(u\"email\")},\n )\n \n def get_user(self):\n return self.user_cache\n \n def clean(self):\n self._authenticate()\n return self.cleaned_data\n## Instruction:\nFix translation issue on EmailAuthForm\n## Code After:\n\nfrom django import forms\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext as _, ugettext_lazy as __\n\nclass EmailAuthForm(forms.Form):\n email = forms.EmailField(required=True, label=__(u\"Email\"))\n password = forms.CharField(label=__(\"Password\"), widget=forms.PasswordInput)\n\n def __init__(self, request=None, *args, **kwargs):\n super(EmailAuthForm, self).__init__(*args, **kwargs)\n \n def _authenticate(self):\n email = self.cleaned_data.get('email')\n password = self.cleaned_data.get('password')\n \n error_messages = {\n 'invalid_login': _(\"Please enter a correct %(email)s and password. \"\n \"Note that both fields may be case-sensitive.\"),\n }\n \n if email and password:\n self.user_cache = authenticate(email=email, password=password)\n if self.user_cache is None:\n raise forms.ValidationError(\n error_messages['invalid_login'],\n code='invalid_login',\n params={'email': _(u\"email\")},\n )\n \n def get_user(self):\n return self.user_cache\n \n def clean(self):\n self._authenticate()\n return self.cleaned_data"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom django.contrib.auth import authenticate\nfrom django.utils.translation import ugettext as _, ugettext_lazy as __\n\n\n\n ... \n\n\nclass EmailAuthForm(forms.Form):\n email = forms.EmailField(required=True, label=__(u\"Email\"))\n password = forms.CharField(label=__(\"Password\"), widget=forms.PasswordInput)\n\n\n\n ... 
"}}},{"rowIdx":19290,"cells":{"commit":{"kind":"string","value":"8207d86b7b2a6e1f81454eefea4784d89c8674a8"},"old_file":{"kind":"string","value":"resolver_test/django_test.py"},"new_file":{"kind":"string","value":"resolver_test/django_test.py"},"old_contents":{"kind":"string","value":"\nfrom urlparse import urljoin\n\nfrom mock import Mock\n\nfrom resolver_test import ResolverTestMixins\n\nimport django\nfrom django.conf import settings\nfrom django.contrib.auth.models import AnonymousUser, User\nfrom django.http import HttpRequest\n\n\n\n\nclass ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):\n maxDiff = None\n\n\nclass ResolverViewTestCase(ResolverDjangoTestCase):\n\n def setUp(self):\n self.user = User(username='cherie')\n self.user.save()\n\n self.request = HttpRequest()\n self.request.session = Mock()\n self.request.user = self.user\n\n self.client.force_login(self.user)\n\n\n def assert_login_required(self, view_to_call):\n self.owner = self.request.user = AnonymousUser()\n self.request.get_full_path = lambda: \"my_path\"\n self.request.build_absolute_uri = lambda: \"my_path\"\n\n response = view_to_call()\n\n self.assertEquals(response.status_code, 302)\n self.assertEquals(\n response['Location'],\n urljoin(settings.LOGIN_URL, '?next=my_path')\n )\n\n"},"new_contents":{"kind":"string","value":"\nfrom urlparse import urljoin\n\nfrom mock import Mock\n\nfrom resolver_test import ResolverTestMixins\n\nimport django\nfrom django.conf import settings\nfrom django.contrib.auth.models import AnonymousUser, User\nfrom django.http import HttpRequest\n\n\n\n\nclass ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):\n maxDiff = None\n\n\n\nusernumber = 0\n\nclass ResolverViewTestCase(ResolverDjangoTestCase):\n\n def setUp(self):\n global usernumber\n self.user = User.objects.create(username='cherie{}'.format(usernumber))\n usernumber += 1\n\n self.request = HttpRequest()\n self.request.session = Mock()\n self.request.user = self.user\n\n 
self.client.force_login(self.user)\n\n\n def assert_login_required(self, view_to_call):\n self.owner = self.request.user = AnonymousUser()\n self.request.get_full_path = lambda: \"my_path\"\n self.request.build_absolute_uri = lambda: \"my_path\"\n\n response = view_to_call()\n\n self.assertEquals(response.status_code, 302)\n self.assertEquals(\n response['Location'],\n urljoin(settings.LOGIN_URL, '?next=my_path')\n )\n\n"},"subject":{"kind":"string","value":"Use different usernames for each test. by: Glenn, Giles"},"message":{"kind":"string","value":"Use different usernames for each test. by: Glenn, Giles\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"pythonanywhere/resolver_test"},"ndiff":{"kind":"string","value":" \n from urlparse import urljoin\n \n from mock import Mock\n \n from resolver_test import ResolverTestMixins\n \n import django\n from django.conf import settings\n from django.contrib.auth.models import AnonymousUser, User\n from django.http import HttpRequest\n \n \n \n \n class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):\n maxDiff = None\n \n \n+ \n+ usernumber = 0\n+ \n class ResolverViewTestCase(ResolverDjangoTestCase):\n \n def setUp(self):\n- self.user = User(username='cherie')\n- self.user.save()\n+ global usernumber\n+ self.user = User.objects.create(username='cherie{}'.format(usernumber))\n+ usernumber += 1\n \n self.request = HttpRequest()\n self.request.session = Mock()\n self.request.user = self.user\n \n self.client.force_login(self.user)\n \n \n def assert_login_required(self, view_to_call):\n self.owner = self.request.user = AnonymousUser()\n self.request.get_full_path = lambda: \"my_path\"\n self.request.build_absolute_uri = lambda: \"my_path\"\n \n response = view_to_call()\n \n self.assertEquals(response.status_code, 302)\n self.assertEquals(\n response['Location'],\n urljoin(settings.LOGIN_URL, '?next=my_path')\n )\n \n 
"},"instruction":{"kind":"string","value":"Use different usernames for each test. by: Glenn, Giles"},"content":{"kind":"string","value":"## Code Before:\n\nfrom urlparse import urljoin\n\nfrom mock import Mock\n\nfrom resolver_test import ResolverTestMixins\n\nimport django\nfrom django.conf import settings\nfrom django.contrib.auth.models import AnonymousUser, User\nfrom django.http import HttpRequest\n\n\n\n\nclass ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):\n maxDiff = None\n\n\nclass ResolverViewTestCase(ResolverDjangoTestCase):\n\n def setUp(self):\n self.user = User(username='cherie')\n self.user.save()\n\n self.request = HttpRequest()\n self.request.session = Mock()\n self.request.user = self.user\n\n self.client.force_login(self.user)\n\n\n def assert_login_required(self, view_to_call):\n self.owner = self.request.user = AnonymousUser()\n self.request.get_full_path = lambda: \"my_path\"\n self.request.build_absolute_uri = lambda: \"my_path\"\n\n response = view_to_call()\n\n self.assertEquals(response.status_code, 302)\n self.assertEquals(\n response['Location'],\n urljoin(settings.LOGIN_URL, '?next=my_path')\n )\n\n\n## Instruction:\nUse different usernames for each test. 
by: Glenn, Giles\n## Code After:\n\nfrom urlparse import urljoin\n\nfrom mock import Mock\n\nfrom resolver_test import ResolverTestMixins\n\nimport django\nfrom django.conf import settings\nfrom django.contrib.auth.models import AnonymousUser, User\nfrom django.http import HttpRequest\n\n\n\n\nclass ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):\n maxDiff = None\n\n\n\nusernumber = 0\n\nclass ResolverViewTestCase(ResolverDjangoTestCase):\n\n def setUp(self):\n global usernumber\n self.user = User.objects.create(username='cherie{}'.format(usernumber))\n usernumber += 1\n\n self.request = HttpRequest()\n self.request.session = Mock()\n self.request.user = self.user\n\n self.client.force_login(self.user)\n\n\n def assert_login_required(self, view_to_call):\n self.owner = self.request.user = AnonymousUser()\n self.request.get_full_path = lambda: \"my_path\"\n self.request.build_absolute_uri = lambda: \"my_path\"\n\n response = view_to_call()\n\n self.assertEquals(response.status_code, 302)\n self.assertEquals(\n response['Location'],\n urljoin(settings.LOGIN_URL, '?next=my_path')\n )\n\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n\nusernumber = 0\n\nclass ResolverViewTestCase(ResolverDjangoTestCase):\n\n\n ... \n\n\n def setUp(self):\n global usernumber\n self.user = User.objects.create(username='cherie{}'.format(usernumber))\n usernumber += 1\n\n\n\n ... 
"}}},{"rowIdx":19291,"cells":{"commit":{"kind":"string","value":"fd061738d025b5371c1415a1f5466bcf5f6476b7"},"old_file":{"kind":"string","value":"py2deb/config/__init__.py"},"new_file":{"kind":"string","value":"py2deb/config/__init__.py"},"old_contents":{"kind":"string","value":"import os\n\nconfig_dir = os.path.dirname(os.path.abspath(__file__))\n\n# Destination of built packages.\nPKG_REPO = '/tmp/'\n"},"new_contents":{"kind":"string","value":"import os\n\nconfig_dir = os.path.dirname(os.path.abspath(__file__))\n\n# Destination of built packages.\nif os.getuid() == 0:\n PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb'\nelse:\n PKG_REPO = '/tmp'\n"},"subject":{"kind":"string","value":"Make it work out of the box on the build-server and locally"},"message":{"kind":"string","value":"Make it work out of the box on the build-server and locally\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"paylogic/py2deb,paylogic/py2deb"},"ndiff":{"kind":"string","value":" import os\n \n config_dir = os.path.dirname(os.path.abspath(__file__))\n \n # Destination of built packages.\n+ if os.getuid() == 0:\n+ PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb'\n+ else:\n- PKG_REPO = '/tmp/'\n+ PKG_REPO = '/tmp'\n "},"instruction":{"kind":"string","value":"Make it work out of the box on the build-server and locally"},"content":{"kind":"string","value":"## Code Before:\nimport os\n\nconfig_dir = os.path.dirname(os.path.abspath(__file__))\n\n# Destination of built packages.\nPKG_REPO = '/tmp/'\n\n## Instruction:\nMake it work out of the box on the build-server and locally\n## Code After:\nimport os\n\nconfig_dir = os.path.dirname(os.path.abspath(__file__))\n\n# Destination of built packages.\nif os.getuid() == 0:\n PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb'\nelse:\n PKG_REPO = '/tmp'\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n# Destination of built packages.\nif os.getuid() == 0:\n PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb'\nelse:\n PKG_REPO = '/tmp'\n\n\n# ... rest of the code ..."}}},{"rowIdx":19292,"cells":{"commit":{"kind":"string","value":"ebbc68da19755097b2131d60bc9757ecb4dc6d4c"},"old_file":{"kind":"string","value":"bundles/auth/models/token.py"},"new_file":{"kind":"string","value":"bundles/auth/models/token.py"},"old_contents":{"kind":"string","value":"import hashlib\nimport random\nimport string\n\nfrom ext.aboard.model import *\n\ndef set_value(token):\n \"\"\"Randomly create and return a value.\"\"\"\n value = str(token.user) + \"_\" + str(token.timestamp)\n len_rand = random.randint(20, 40)\n to_pick = string.digits + string.ascii_letters + \\\n \"_-+^$\"\n for i in range(len_rand):\n value += random.choice(to_pick)\n \n print(\"Private value\", value)\n # Hash the value\n hashed = hashlib.sha512(value.encode())\n value = hashed.hexdigest()\n print(\"Public value\", value)\n return value\n\nclass Token(Model):\n \n \"\"\"A token model.\"\"\"\n \n id = None\n user = Integer()\n timestamp = Integer()\n value = String(pkey=True, default=set_value)\n"},"new_contents":{"kind":"string","value":"import hashlib\nimport random\nimport string\n\nfrom ext.aboard.model import *\n\nclass Token(Model):\n \n \"\"\"A token model.\"\"\"\n \n id = None\n user = Integer()\n timestamp = Integer()\n value = String(pkey=True)\n \n def __init__(self, user=None, timestamp=None):\n value = None\n if user and timestamp:\n value = Token.get_token_value(user, timestamp)\n \n Model.__init__(self, user=user, timestamp=timestamp, value=value)\n \n @staticmethod\n def get_token_value(user, timestamp):\n \"\"\"Randomly create and return a token value.\"\"\"\n value = str(user) + \"_\" + str(timestamp)\n len_rand = random.randint(20, 40)\n to_pick = string.digits + string.ascii_letters + \\\n \"_-+^$\"\n for i in range(len_rand):\n value += random.choice(to_pick)\n \n print(\"Private 
value\", value)\n \n # Hash the value\n hashed = hashlib.sha512(value.encode())\n value = hashed.hexdigest()\n print(\"Public value\", value)\n return value\n"},"subject":{"kind":"string","value":"Use the Model constructor to generate a default value"},"message":{"kind":"string","value":"[user] Use the Model constructor to generate a default value\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"v-legoff/pa-poc2,v-legoff/pa-poc2"},"ndiff":{"kind":"string","value":" import hashlib\n import random\n import string\n \n from ext.aboard.model import *\n- \n- def set_value(token):\n- \"\"\"Randomly create and return a value.\"\"\"\n- value = str(token.user) + \"_\" + str(token.timestamp)\n- len_rand = random.randint(20, 40)\n- to_pick = string.digits + string.ascii_letters + \\\n- \"_-+^$\"\n- for i in range(len_rand):\n- value += random.choice(to_pick)\n- \n- print(\"Private value\", value)\n- # Hash the value\n- hashed = hashlib.sha512(value.encode())\n- value = hashed.hexdigest()\n- print(\"Public value\", value)\n- return value\n \n class Token(Model):\n \n \"\"\"A token model.\"\"\"\n \n id = None\n user = Integer()\n timestamp = Integer()\n- value = String(pkey=True, default=set_value)\n+ value = String(pkey=True)\n+ \n+ def __init__(self, user=None, timestamp=None):\n+ value = None\n+ if user and timestamp:\n+ value = Token.get_token_value(user, timestamp)\n+ \n+ Model.__init__(self, user=user, timestamp=timestamp, value=value)\n+ \n+ @staticmethod\n+ def get_token_value(user, timestamp):\n+ \"\"\"Randomly create and return a token value.\"\"\"\n+ value = str(user) + \"_\" + str(timestamp)\n+ len_rand = random.randint(20, 40)\n+ to_pick = string.digits + string.ascii_letters + \\\n+ \"_-+^$\"\n+ for i in range(len_rand):\n+ value += random.choice(to_pick)\n+ \n+ print(\"Private value\", value)\n+ \n+ # Hash the value\n+ hashed = hashlib.sha512(value.encode())\n+ value = 
hashed.hexdigest()\n+ print(\"Public value\", value)\n+ return value\n "},"instruction":{"kind":"string","value":"Use the Model constructor to generate a default value"},"content":{"kind":"string","value":"## Code Before:\nimport hashlib\nimport random\nimport string\n\nfrom ext.aboard.model import *\n\ndef set_value(token):\n \"\"\"Randomly create and return a value.\"\"\"\n value = str(token.user) + \"_\" + str(token.timestamp)\n len_rand = random.randint(20, 40)\n to_pick = string.digits + string.ascii_letters + \\\n \"_-+^$\"\n for i in range(len_rand):\n value += random.choice(to_pick)\n \n print(\"Private value\", value)\n # Hash the value\n hashed = hashlib.sha512(value.encode())\n value = hashed.hexdigest()\n print(\"Public value\", value)\n return value\n\nclass Token(Model):\n \n \"\"\"A token model.\"\"\"\n \n id = None\n user = Integer()\n timestamp = Integer()\n value = String(pkey=True, default=set_value)\n\n## Instruction:\nUse the Model constructor to generate a default value\n## Code After:\nimport hashlib\nimport random\nimport string\n\nfrom ext.aboard.model import *\n\nclass Token(Model):\n \n \"\"\"A token model.\"\"\"\n \n id = None\n user = Integer()\n timestamp = Integer()\n value = String(pkey=True)\n \n def __init__(self, user=None, timestamp=None):\n value = None\n if user and timestamp:\n value = Token.get_token_value(user, timestamp)\n \n Model.__init__(self, user=user, timestamp=timestamp, value=value)\n \n @staticmethod\n def get_token_value(user, timestamp):\n \"\"\"Randomly create and return a token value.\"\"\"\n value = str(user) + \"_\" + str(timestamp)\n len_rand = random.randint(20, 40)\n to_pick = string.digits + string.ascii_letters + \\\n \"_-+^$\"\n for i in range(len_rand):\n value += random.choice(to_pick)\n \n print(\"Private value\", value)\n \n # Hash the value\n hashed = hashlib.sha512(value.encode())\n value = hashed.hexdigest()\n print(\"Public value\", value)\n return 
value\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom ext.aboard.model import *\n\n\n\n# ... modified code ... \n\n\n timestamp = Integer()\n value = String(pkey=True)\n \n def __init__(self, user=None, timestamp=None):\n value = None\n if user and timestamp:\n value = Token.get_token_value(user, timestamp)\n \n Model.__init__(self, user=user, timestamp=timestamp, value=value)\n \n @staticmethod\n def get_token_value(user, timestamp):\n \"\"\"Randomly create and return a token value.\"\"\"\n value = str(user) + \"_\" + str(timestamp)\n len_rand = random.randint(20, 40)\n to_pick = string.digits + string.ascii_letters + \\\n \"_-+^$\"\n for i in range(len_rand):\n value += random.choice(to_pick)\n \n print(\"Private value\", value)\n \n # Hash the value\n hashed = hashlib.sha512(value.encode())\n value = hashed.hexdigest()\n print(\"Public value\", value)\n return value\n\n\n# ... rest of the code ..."}}},{"rowIdx":19293,"cells":{"commit":{"kind":"string","value":"bb229be50e37bb710c32541cec7b159da9508335"},"old_file":{"kind":"string","value":"tests/functional/subcommands/test_subcommands.py"},"new_file":{"kind":"string","value":"tests/functional/subcommands/test_subcommands.py"},"old_contents":{"kind":"string","value":"import subprocess\n\n\ndef test_subcommand():\n \"\"\"\n Test that a command from the example project is registered.\n \"\"\"\n output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)\n assert b'testcommand' in output\n\n\ndef test_subcommand_group():\n \"\"\"\n Test that a command group is registered.\n \"\"\"\n output = subprocess.check_output(['textx', 'testgroup'],\n stderr=subprocess.STDOUT)\n assert b'groupcommand1' in output\n assert b'groupcommand2' in output\n"},"new_contents":{"kind":"string","value":"import sys\nimport pytest\nimport subprocess\n\n\nif (3, 6) <= sys.version_info < (3, 8):\n pytest.skip(\"Temporary workaround for Travis problems\", allow_module_level=True)\n\n\ndef 
test_subcommand():\n \"\"\"\n Test that a command from the example project is registered.\n \"\"\"\n output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)\n assert b'testcommand' in output\n\n\ndef test_subcommand_group():\n \"\"\"\n Test that a command group is registered.\n \"\"\"\n output = subprocess.check_output(['textx', 'testgroup'],\n stderr=subprocess.STDOUT)\n assert b'groupcommand1' in output\n assert b'groupcommand2' in output\n"},"subject":{"kind":"string","value":"Add workaround for Travis CI problems"},"message":{"kind":"string","value":"Add workaround for Travis CI problems\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"igordejanovic/textX,igordejanovic/textX,igordejanovic/textX"},"ndiff":{"kind":"string","value":"+ import sys\n+ import pytest\n import subprocess\n+ \n+ \n+ if (3, 6) <= sys.version_info < (3, 8):\n+ pytest.skip(\"Temporary workaround for Travis problems\", allow_module_level=True)\n \n \n def test_subcommand():\n \"\"\"\n Test that a command from the example project is registered.\n \"\"\"\n output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)\n assert b'testcommand' in output\n \n \n def test_subcommand_group():\n \"\"\"\n Test that a command group is registered.\n \"\"\"\n output = subprocess.check_output(['textx', 'testgroup'],\n stderr=subprocess.STDOUT)\n assert b'groupcommand1' in output\n assert b'groupcommand2' in output\n "},"instruction":{"kind":"string","value":"Add workaround for Travis CI problems"},"content":{"kind":"string","value":"## Code Before:\nimport subprocess\n\n\ndef test_subcommand():\n \"\"\"\n Test that a command from the example project is registered.\n \"\"\"\n output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)\n assert b'testcommand' in output\n\n\ndef test_subcommand_group():\n \"\"\"\n Test that a command group is registered.\n \"\"\"\n output = 
subprocess.check_output(['textx', 'testgroup'],\n stderr=subprocess.STDOUT)\n assert b'groupcommand1' in output\n assert b'groupcommand2' in output\n\n## Instruction:\nAdd workaround for Travis CI problems\n## Code After:\nimport sys\nimport pytest\nimport subprocess\n\n\nif (3, 6) <= sys.version_info < (3, 8):\n pytest.skip(\"Temporary workaround for Travis problems\", allow_module_level=True)\n\n\ndef test_subcommand():\n \"\"\"\n Test that a command from the example project is registered.\n \"\"\"\n output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT)\n assert b'testcommand' in output\n\n\ndef test_subcommand_group():\n \"\"\"\n Test that a command group is registered.\n \"\"\"\n output = subprocess.check_output(['textx', 'testgroup'],\n stderr=subprocess.STDOUT)\n assert b'groupcommand1' in output\n assert b'groupcommand2' in output\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport sys\nimport pytest\nimport subprocess\n\n\nif (3, 6) <= sys.version_info < (3, 8):\n pytest.skip(\"Temporary workaround for Travis problems\", allow_module_level=True)\n\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":19294,"cells":{"commit":{"kind":"string","value":"5d5b59bde655fbeb2d07bd5539c2ff9b29879d1d"},"old_file":{"kind":"string","value":"pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py"},"new_file":{"kind":"string","value":"pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py"},"old_contents":{"kind":"string","value":"\nimport csv\n\n# Writer Objects\noutputFile = open(\"output.csv\", \"w\", newline='')\noutputWriter = csv.writer(outputFile)\nprint(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\nprint(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\nprint(outputWriter.writerow([1, 2, 3.141592, 4]))\noutputFile.close()\n\n# Delimiter and lineterminator Keyword Arguments\ncsvFile = open(\"example.tsv\", 'w', newline='')\ncsvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\nprint(csvWriter.writerow(['apples', 'oranges', 'grapes']))\nprint(csvWriter.writerow(['eggs', 'bacon', 'ham']))\nprint(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\ncsvFile.close()\n"},"new_contents":{"kind":"string","value":"\n\ndef main():\n import csv\n\n # Writer Objects\n outputFile = open(\"output.csv\", \"w\", newline='')\n outputWriter = csv.writer(outputFile)\n print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow([1, 2, 3.141592, 4]))\n outputFile.close()\n\n # Delimiter and lineterminator Keyword Arguments\n csvFile = open(\"example.tsv\", 'w', newline='')\n csvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\n print(csvWriter.writerow(['apples', 'oranges', 'grapes']))\n print(csvWriter.writerow(['eggs', 'bacon', 'ham']))\n print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\n csvFile.close()\n\n\nif __name__ == '__main__':\n main()\n"},"subject":{"kind":"string","value":"Update P1_writeCSV.py added 
docstring and wrapped in main function"},"message":{"kind":"string","value":"Update P1_writeCSV.py\nadded docstring and wrapped in main function\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials"},"ndiff":{"kind":"string","value":" \n- import csv\n \n+ def main():\n+ import csv\n- # Writer Objects\n- outputFile = open(\"output.csv\", \"w\", newline='')\n- outputWriter = csv.writer(outputFile)\n- print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\n- print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\n- print(outputWriter.writerow([1, 2, 3.141592, 4]))\n- outputFile.close()\n \n+ # Writer Objects\n+ outputFile = open(\"output.csv\", \"w\", newline='')\n+ outputWriter = csv.writer(outputFile)\n- # Delimiter and lineterminator Keyword Arguments\n- csvFile = open(\"example.tsv\", 'w', newline='')\n- csvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\n- print(csvWriter.writerow(['apples', 'oranges', 'grapes']))\n- print(csvWriter.writerow(['eggs', 'bacon', 'ham']))\n+ print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\n- print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\n- csvFile.close()\n+ print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\n+ print(outputWriter.writerow([1, 2, 3.141592, 4]))\n+ outputFile.close()\n \n+ # Delimiter and lineterminator Keyword Arguments\n+ csvFile = open(\"example.tsv\", 'w', newline='')\n+ csvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\n+ print(csvWriter.writerow(['apples', 'oranges', 'grapes']))\n+ print(csvWriter.writerow(['eggs', 'bacon', 'ham']))\n+ print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\n+ csvFile.close()\n+ \n+ \n+ if __name__ == '__main__':\n+ main()\n+ "},"instruction":{"kind":"string","value":"Update P1_writeCSV.py added 
docstring and wrapped in main function"},"content":{"kind":"string","value":"## Code Before:\n\nimport csv\n\n# Writer Objects\noutputFile = open(\"output.csv\", \"w\", newline='')\noutputWriter = csv.writer(outputFile)\nprint(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\nprint(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\nprint(outputWriter.writerow([1, 2, 3.141592, 4]))\noutputFile.close()\n\n# Delimiter and lineterminator Keyword Arguments\ncsvFile = open(\"example.tsv\", 'w', newline='')\ncsvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\nprint(csvWriter.writerow(['apples', 'oranges', 'grapes']))\nprint(csvWriter.writerow(['eggs', 'bacon', 'ham']))\nprint(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\ncsvFile.close()\n\n## Instruction:\nUpdate P1_writeCSV.py added docstring and wrapped in main function\n## Code After:\n\n\ndef main():\n import csv\n\n # Writer Objects\n outputFile = open(\"output.csv\", \"w\", newline='')\n outputWriter = csv.writer(outputFile)\n print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow([1, 2, 3.141592, 4]))\n outputFile.close()\n\n # Delimiter and lineterminator Keyword Arguments\n csvFile = open(\"example.tsv\", 'w', newline='')\n csvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\n print(csvWriter.writerow(['apples', 'oranges', 'grapes']))\n print(csvWriter.writerow(['eggs', 'bacon', 'ham']))\n print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\n csvFile.close()\n\n\nif __name__ == '__main__':\n main()\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\n\n\ndef main():\n import csv\n\n # Writer Objects\n outputFile = open(\"output.csv\", \"w\", newline='')\n outputWriter = csv.writer(outputFile)\n print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))\n print(outputWriter.writerow([1, 2, 3.141592, 4]))\n outputFile.close()\n\n # Delimiter and lineterminator Keyword Arguments\n csvFile = open(\"example.tsv\", 'w', newline='')\n csvWriter = csv.writer(csvFile, delimiter='\\t', lineterminator='\\n\\n')\n print(csvWriter.writerow(['apples', 'oranges', 'grapes']))\n print(csvWriter.writerow(['eggs', 'bacon', 'ham']))\n print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))\n csvFile.close()\n\n\nif __name__ == '__main__':\n main()\n\n\n// ... rest of the code ..."}}},{"rowIdx":19295,"cells":{"commit":{"kind":"string","value":"301f22b9b2de2a27dd2e3faa27ccb9c70266e938"},"old_file":{"kind":"string","value":"pybossa/api/project_stats.py"},"new_file":{"kind":"string","value":"pybossa/api/project_stats.py"},"old_contents":{"kind":"string","value":"from flask import request\nfrom pybossa.model.project_stats import ProjectStats\nfrom api_base import APIBase\n\n\nclass ProjectStatsAPI(APIBase):\n\n \"\"\"Class for domain object ProjectStats.\"\"\"\n\n __class__ = ProjectStats\n\n def _select_attributes(self, stats_data):\n if request.args.get('full'):\n return stats_data\n stats_data['info'].pop('hours_stats', None)\n stats_data['info'].pop('dates_stats', None)\n stats_data['info'].pop('users_stats', None)\n return stats_data\n"},"new_contents":{"kind":"string","value":"import copy\nfrom flask import request\nfrom pybossa.model.project_stats import ProjectStats\nfrom api_base import APIBase\n\n\nclass ProjectStatsAPI(APIBase):\n\n \"\"\"Class for domain object ProjectStats.\"\"\"\n\n __class__ = ProjectStats\n\n def _select_attributes(self, stats_data):\n if not request.args.get('full'):\n tmp = 
copy.deepcopy(stats_data)\n tmp['info'].pop('hours_stats', None)\n tmp['info'].pop('dates_stats', None)\n tmp['info'].pop('users_stats', None)\n return tmp\n return stats_data\n"},"subject":{"kind":"string","value":"Fix _select_attributes from project api"},"message":{"kind":"string","value":"Fix _select_attributes from project api\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"PyBossa/pybossa,PyBossa/pybossa,Scifabric/pybossa,Scifabric/pybossa"},"ndiff":{"kind":"string","value":"+ import copy\n from flask import request\n from pybossa.model.project_stats import ProjectStats\n from api_base import APIBase\n \n \n class ProjectStatsAPI(APIBase):\n \n \"\"\"Class for domain object ProjectStats.\"\"\"\n \n __class__ = ProjectStats\n \n def _select_attributes(self, stats_data):\n- if request.args.get('full'):\n+ if not request.args.get('full'):\n- return stats_data\n+ tmp = copy.deepcopy(stats_data)\n- stats_data['info'].pop('hours_stats', None)\n+ tmp['info'].pop('hours_stats', None)\n- stats_data['info'].pop('dates_stats', None)\n+ tmp['info'].pop('dates_stats', None)\n- stats_data['info'].pop('users_stats', None)\n+ tmp['info'].pop('users_stats', None)\n+ return tmp\n return stats_data\n "},"instruction":{"kind":"string","value":"Fix _select_attributes from project api"},"content":{"kind":"string","value":"## Code Before:\nfrom flask import request\nfrom pybossa.model.project_stats import ProjectStats\nfrom api_base import APIBase\n\n\nclass ProjectStatsAPI(APIBase):\n\n \"\"\"Class for domain object ProjectStats.\"\"\"\n\n __class__ = ProjectStats\n\n def _select_attributes(self, stats_data):\n if request.args.get('full'):\n return stats_data\n stats_data['info'].pop('hours_stats', None)\n stats_data['info'].pop('dates_stats', None)\n stats_data['info'].pop('users_stats', None)\n return stats_data\n\n## Instruction:\nFix _select_attributes from project api\n## Code After:\nimport 
copy\nfrom flask import request\nfrom pybossa.model.project_stats import ProjectStats\nfrom api_base import APIBase\n\n\nclass ProjectStatsAPI(APIBase):\n\n \"\"\"Class for domain object ProjectStats.\"\"\"\n\n __class__ = ProjectStats\n\n def _select_attributes(self, stats_data):\n if not request.args.get('full'):\n tmp = copy.deepcopy(stats_data)\n tmp['info'].pop('hours_stats', None)\n tmp['info'].pop('dates_stats', None)\n tmp['info'].pop('users_stats', None)\n return tmp\n return stats_data\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport copy\nfrom flask import request\n\n\n// ... modified code ... \n\n\n def _select_attributes(self, stats_data):\n if not request.args.get('full'):\n tmp = copy.deepcopy(stats_data)\n tmp['info'].pop('hours_stats', None)\n tmp['info'].pop('dates_stats', None)\n tmp['info'].pop('users_stats', None)\n return tmp\n return stats_data\n\n\n// ... rest of the code ..."}}},{"rowIdx":19296,"cells":{"commit":{"kind":"string","value":"014b4905784f50fd13111ca8528fade9be4bd767"},"old_file":{"kind":"string","value":"skimage/feature/__init__.py"},"new_file":{"kind":"string","value":"skimage/feature/__init__.py"},"old_contents":{"kind":"string","value":"from ._hog import hog\nfrom ._greycomatrix import greycomatrix, greycoprops\nfrom .hog import hog\nfrom .texture import greycomatrix, greycoprops, local_binary_pattern\nfrom .peak import peak_local_max\nfrom ._harris import harris\nfrom .template import match_template\n"},"new_contents":{"kind":"string","value":"from ._hog import hog\nfrom .texture import greycomatrix, greycoprops, local_binary_pattern\nfrom .peak import peak_local_max\nfrom ._harris import harris\nfrom .template import match_template\n"},"subject":{"kind":"string","value":"Fix import bug due to rebase"},"message":{"kind":"string","value":"Fix import bug due to 
rebase\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"blink1073/scikit-image,robintw/scikit-image,emon10005/scikit-image,newville/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,youprofit/scikit-image,youprofit/scikit-image,rjeli/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,SamHames/scikit-image,almarklein/scikit-image,SamHames/scikit-image,bennlich/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,almarklein/scikit-image,WarrenWeckesser/scikits-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,newville/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,emmanuelle/scikits.image,jwiggins/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,almarklein/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,ofgulban/scikit-image,keflavich/scikit-image,keflavich/scikit-image,paalge/scikit-image,chintak/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,emmanuelle/scikits.image,jwiggins/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,rjeli/scikit-image,SamHames/scikit-image,emmanuelle/scikits.image,chintak/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image"},"ndiff":{"kind":"string","value":" from ._hog import hog\n- from 
._greycomatrix import greycomatrix, greycoprops\n- from .hog import hog\n from .texture import greycomatrix, greycoprops, local_binary_pattern\n from .peak import peak_local_max\n from ._harris import harris\n from .template import match_template\n "},"instruction":{"kind":"string","value":"Fix import bug due to rebase"},"content":{"kind":"string","value":"## Code Before:\nfrom ._hog import hog\nfrom ._greycomatrix import greycomatrix, greycoprops\nfrom .hog import hog\nfrom .texture import greycomatrix, greycoprops, local_binary_pattern\nfrom .peak import peak_local_max\nfrom ._harris import harris\nfrom .template import match_template\n\n## Instruction:\nFix import bug due to rebase\n## Code After:\nfrom ._hog import hog\nfrom .texture import greycomatrix, greycoprops, local_binary_pattern\nfrom .peak import peak_local_max\nfrom ._harris import harris\nfrom .template import match_template\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom ._hog import hog\nfrom .texture import greycomatrix, greycoprops, local_binary_pattern\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":19297,"cells":{"commit":{"kind":"string","value":"b9b03c1f736b38d122baafdd57bbd96657de17af"},"old_file":{"kind":"string","value":"valuenetwork/api/types/apps.py"},"new_file":{"kind":"string","value":"valuenetwork/api/types/apps.py"},"old_contents":{"kind":"string","value":"from django.apps import AppConfig\nimport valuenetwork.api.types as types\n\nclass ApiTypesAppConfig(AppConfig):\n name = 'valuenetwork.api.types'\n verbose_name = \"ApiTypes\"\n\n def ready(self):\n #import pdb; pdb.set_trace()\n \n from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory\n types.EconomicResource = EconomicResource\n types.EconomicResourceCategory = EconomicResourceCategory\n from valuenetwork.api.types.Agent import Agent\n types.Agent = Agent\n from valuenetwork.api.types.Process import Process\n types.Process = Process\n from valuenetwork.api.types.EconomicEvent import EconomicEvent\n types.EconomicEvent = EconomicEvent\n super(ApiTypesAppConfig, self).ready()\n\n \n"},"new_contents":{"kind":"string","value":"from django.apps import AppConfig\nimport valuenetwork.api.types as types\n\nclass ApiTypesAppConfig(AppConfig):\n name = 'valuenetwork.api.types'\n verbose_name = \"ApiTypes\"\n\n def ready(self):\n \"\"\" Source of this hack:\n https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py\n 'Adding from .models import CommentMixin imports CommentMixin so that you can use it\n inside the ready() method. 
It does not magically add it to the comment module so that\n you can access it as comments.CommentMixin\n \n You could assign it to the comments module in the ready() method.'\n from .models import CommentMixin\n comments.CommentMixin = CommentsMixin\n \"\"\"\n \n from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory\n types.EconomicResource = EconomicResource\n types.EconomicResourceCategory = EconomicResourceCategory\n from valuenetwork.api.types.Agent import Agent\n types.Agent = Agent\n from valuenetwork.api.types.Process import Process\n types.Process = Process\n from valuenetwork.api.types.EconomicEvent import EconomicEvent\n types.EconomicEvent = EconomicEvent\n super(ApiTypesAppConfig, self).ready()\n\n \n"},"subject":{"kind":"string","value":"Add a comment about the source of the hack"},"message":{"kind":"string","value":"Add a comment about the source of the hack\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork"},"ndiff":{"kind":"string","value":" from django.apps import AppConfig\n import valuenetwork.api.types as types\n \n class ApiTypesAppConfig(AppConfig):\n name = 'valuenetwork.api.types'\n verbose_name = \"ApiTypes\"\n \n def ready(self):\n- #import pdb; pdb.set_trace()\n+ \"\"\" Source of this hack:\n+ https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py\n+ 'Adding from .models import CommentMixin imports CommentMixin so that you can use it\n+ inside the ready() method. 
It does not magically add it to the comment module so that\n+ you can access it as comments.CommentMixin\n+ \n+ You could assign it to the comments module in the ready() method.'\n+ from .models import CommentMixin\n+ comments.CommentMixin = CommentsMixin\n+ \"\"\"\n \n from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory\n types.EconomicResource = EconomicResource\n types.EconomicResourceCategory = EconomicResourceCategory\n from valuenetwork.api.types.Agent import Agent\n types.Agent = Agent\n from valuenetwork.api.types.Process import Process\n types.Process = Process\n from valuenetwork.api.types.EconomicEvent import EconomicEvent\n types.EconomicEvent = EconomicEvent\n super(ApiTypesAppConfig, self).ready()\n \n \n "},"instruction":{"kind":"string","value":"Add a comment about the source of the hack"},"content":{"kind":"string","value":"## Code Before:\nfrom django.apps import AppConfig\nimport valuenetwork.api.types as types\n\nclass ApiTypesAppConfig(AppConfig):\n name = 'valuenetwork.api.types'\n verbose_name = \"ApiTypes\"\n\n def ready(self):\n #import pdb; pdb.set_trace()\n \n from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory\n types.EconomicResource = EconomicResource\n types.EconomicResourceCategory = EconomicResourceCategory\n from valuenetwork.api.types.Agent import Agent\n types.Agent = Agent\n from valuenetwork.api.types.Process import Process\n types.Process = Process\n from valuenetwork.api.types.EconomicEvent import EconomicEvent\n types.EconomicEvent = EconomicEvent\n super(ApiTypesAppConfig, self).ready()\n\n \n\n## Instruction:\nAdd a comment about the source of the hack\n## Code After:\nfrom django.apps import AppConfig\nimport valuenetwork.api.types as types\n\nclass ApiTypesAppConfig(AppConfig):\n name = 'valuenetwork.api.types'\n verbose_name = \"ApiTypes\"\n\n def ready(self):\n \"\"\" Source of this hack:\n 
https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py\n 'Adding from .models import CommentMixin imports CommentMixin so that you can use it\n inside the ready() method. It does not magically add it to the comment module so that\n you can access it as comments.CommentMixin\n \n You could assign it to the comments module in the ready() method.'\n from .models import CommentMixin\n comments.CommentMixin = CommentsMixin\n \"\"\"\n \n from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory\n types.EconomicResource = EconomicResource\n types.EconomicResourceCategory = EconomicResourceCategory\n from valuenetwork.api.types.Agent import Agent\n types.Agent = Agent\n from valuenetwork.api.types.Process import Process\n types.Process = Process\n from valuenetwork.api.types.EconomicEvent import EconomicEvent\n types.EconomicEvent = EconomicEvent\n super(ApiTypesAppConfig, self).ready()\n\n \n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n def ready(self):\n \"\"\" Source of this hack:\n https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py\n 'Adding from .models import CommentMixin imports CommentMixin so that you can use it\n inside the ready() method. It does not magically add it to the comment module so that\n you can access it as comments.CommentMixin\n \n You could assign it to the comments module in the ready() method.'\n from .models import CommentMixin\n comments.CommentMixin = CommentsMixin\n \"\"\"\n \n\n\n ... 
"}}},{"rowIdx":19298,"cells":{"commit":{"kind":"string","value":"1a10f21566f59c9f4f8171bc088af1e2a18d9702"},"old_file":{"kind":"string","value":"prestoadmin/_version.py"},"new_file":{"kind":"string","value":"prestoadmin/_version.py"},"old_contents":{"kind":"string","value":"\"\"\"Version information\"\"\"\n\n# This must be the last line in the file and the format must be maintained\n# even when the version is changed\n__version__ = '2.3'\n"},"new_contents":{"kind":"string","value":"\"\"\"Version information\"\"\"\n\n# This must be the last line in the file and the format must be maintained\n# even when the version is changed\n__version__ = '2.4-SNAPSHOT'\n"},"subject":{"kind":"string","value":"Prepare for the next development iteration"},"message":{"kind":"string","value":"Prepare for the next development iteration\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"prestodb/presto-admin,prestodb/presto-admin"},"ndiff":{"kind":"string","value":" \"\"\"Version information\"\"\"\n \n # This must be the last line in the file and the format must be maintained\n # even when the version is changed\n- __version__ = '2.3'\n+ __version__ = '2.4-SNAPSHOT'\n "},"instruction":{"kind":"string","value":"Prepare for the next development iteration"},"content":{"kind":"string","value":"## Code Before:\n\"\"\"Version information\"\"\"\n\n# This must be the last line in the file and the format must be maintained\n# even when the version is changed\n__version__ = '2.3'\n\n## Instruction:\nPrepare for the next development iteration\n## Code After:\n\"\"\"Version information\"\"\"\n\n# This must be the last line in the file and the format must be maintained\n# even when the version is changed\n__version__ = '2.4-SNAPSHOT'\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n# even when the version is changed\n__version__ = '2.4-SNAPSHOT'\n\n\n ... 
"}}},{"rowIdx":19299,"cells":{"commit":{"kind":"string","value":"3f3818e4a21ffc4e1b8d4426093fc093396b5a5b"},"old_file":{"kind":"string","value":"pandas_finance.py"},"new_file":{"kind":"string","value":"pandas_finance.py"},"old_contents":{"kind":"string","value":"import datetime\n\nimport scraperwiki\nimport numpy\nimport pandas.io.data as web\n\n\ndef get_stock(stock, start, end, service):\n \"\"\"\n Return data frame of finance data for stock.\n\n Takes start and end datetimes, and service name of 'google' or 'yahoo'.\n \"\"\"\n return web.DataReader(stock, service, start, end)\n\n\ndef parse_finance_frame(stock, start, end, service='google'):\n \"\"\"\n Return rows of dicts from a finance data frame for scraperwiki.sqlite.\n\n service can also be 'yahoo', start and end are datetimes.\n \"\"\"\n frame = get_stock(stock, start, end, service)\n rows = []\n for idx in range(len(frame)):\n current_row_as_dict = frame.ix[idx].to_dict()\n # have to convert dates because these are Pandas timestamps and\n # dumptruck doesn't support them\n current_row_as_dict['Date'] = frame.index[idx].to_datetime()\n current_row_as_dict['Stock'] = stock\n # horrible hack because data values are numpy.float64 and dumptruck\n # doesn't support them\n for key in current_row_as_dict:\n if isinstance(current_row_as_dict[key], numpy.float64):\n current_row_as_dict[key] = float(current_row_as_dict[key])\n rows.append(current_row_as_dict)\n return rows\n\n\ndef main():\n \"\"\"\n Dump stock data into scraperwiki.sqlite using pandas.io.data.\n \"\"\"\n # arbitrary start chosen\n start = datetime.datetime(2014, 3, 1)\n end = datetime.datetime.today()\n\n stock_list = ['TWTR', 'FB']\n rows = []\n for stock in stock_list:\n rows.extend(parse_finance_frame(stock, start, end))\n scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])\n\nif __name__ == '__main__':\n main()\n"},"new_contents":{"kind":"string","value":"import datetime\nimport sqlite3\nimport pandas.io.data as web\nimport 
pandas.io.sql as sql\n\n\ndef get_stock(stock, start, end):\n \"\"\"\n Return data frame of Yahoo Finance data for stock.\n\n Takes start and end datetimes.\n \"\"\"\n return web.DataReader(stock, 'yahoo', start, end)\n\ndef scrape_stock(stock, start, end):\n sqlite_db.execute(\"drop table if exists {};\".format(stock))\n frame = (get_stock(stock, start, end))\n # make Date not an index so it appears in table\n frame = frame.reset_index()\n # force Date datetime to string\n frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())\n sql.write_frame(frame, stock, sqlite_db)\n\ndef main():\n global sqlite_db\n sqlite_db = sqlite3.connect(\"scraperwiki.sqlite\")\n start = datetime.datetime(2014, 3, 1)\n end = datetime.datetime.today()\n for ticker in ['TWTR', 'FB']:\n scrape_stock(ticker, start, end)\n \nif __name__ == '__main__':\n main()\n"},"subject":{"kind":"string","value":"Use pandas native saving by forcing date to not be index, and be string"},"message":{"kind":"string","value":"Use pandas native saving by forcing date to not be index, and be string"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"scraperwiki/stock-tool,scraperwiki/stock-tool"},"ndiff":{"kind":"string","value":" import datetime\n+ import sqlite3\n- \n- import scraperwiki\n- import numpy\n import pandas.io.data as web\n+ import pandas.io.sql as sql\n \n \n- def get_stock(stock, start, end, service):\n+ def get_stock(stock, start, end):\n \"\"\"\n- Return data frame of finance data for stock.\n+ Return data frame of Yahoo Finance data for stock.\n \n- Takes start and end datetimes, and service name of 'google' or 'yahoo'.\n+ Takes start and end datetimes.\n \"\"\"\n- return web.DataReader(stock, service, start, end)\n+ return web.DataReader(stock, 'yahoo', start, end)\n \n+ def scrape_stock(stock, start, end):\n+ sqlite_db.execute(\"drop table if exists {};\".format(stock))\n- \n- def parse_finance_frame(stock, 
start, end, service='google'):\n- \"\"\"\n- Return rows of dicts from a finance data frame for scraperwiki.sqlite.\n- \n- service can also be 'yahoo', start and end are datetimes.\n- \"\"\"\n- frame = get_stock(stock, start, end, service)\n+ frame = (get_stock(stock, start, end))\n+ # make Date not an index so it appears in table\n+ frame = frame.reset_index()\n+ # force Date datetime to string\n+ frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())\n+ sql.write_frame(frame, stock, sqlite_db)\n- rows = []\n- for idx in range(len(frame)):\n- current_row_as_dict = frame.ix[idx].to_dict()\n- # have to convert dates because these are Pandas timestamps and\n- # dumptruck doesn't support them\n- current_row_as_dict['Date'] = frame.index[idx].to_datetime()\n- current_row_as_dict['Stock'] = stock\n- # horrible hack because data values are numpy.float64 and dumptruck\n- # doesn't support them\n- for key in current_row_as_dict:\n- if isinstance(current_row_as_dict[key], numpy.float64):\n- current_row_as_dict[key] = float(current_row_as_dict[key])\n- rows.append(current_row_as_dict)\n- return rows\n- \n \n def main():\n+ global sqlite_db\n+ sqlite_db = sqlite3.connect(\"scraperwiki.sqlite\")\n- \"\"\"\n- Dump stock data into scraperwiki.sqlite using pandas.io.data.\n- \"\"\"\n- # arbitrary start chosen\n start = datetime.datetime(2014, 3, 1)\n end = datetime.datetime.today()\n+ for ticker in ['TWTR', 'FB']:\n+ scrape_stock(ticker, start, end)\n+ \n- \n- stock_list = ['TWTR', 'FB']\n- rows = []\n- for stock in stock_list:\n- rows.extend(parse_finance_frame(stock, start, end))\n- scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])\n- \n if __name__ == '__main__':\n main()\n "},"instruction":{"kind":"string","value":"Use pandas native saving by forcing date to not be index, and be string"},"content":{"kind":"string","value":"## Code Before:\nimport datetime\n\nimport scraperwiki\nimport numpy\nimport pandas.io.data as web\n\n\ndef get_stock(stock, 
start, end, service):\n \"\"\"\n Return data frame of finance data for stock.\n\n Takes start and end datetimes, and service name of 'google' or 'yahoo'.\n \"\"\"\n return web.DataReader(stock, service, start, end)\n\n\ndef parse_finance_frame(stock, start, end, service='google'):\n \"\"\"\n Return rows of dicts from a finance data frame for scraperwiki.sqlite.\n\n service can also be 'yahoo', start and end are datetimes.\n \"\"\"\n frame = get_stock(stock, start, end, service)\n rows = []\n for idx in range(len(frame)):\n current_row_as_dict = frame.ix[idx].to_dict()\n # have to convert dates because these are Pandas timestamps and\n # dumptruck doesn't support them\n current_row_as_dict['Date'] = frame.index[idx].to_datetime()\n current_row_as_dict['Stock'] = stock\n # horrible hack because data values are numpy.float64 and dumptruck\n # doesn't support them\n for key in current_row_as_dict:\n if isinstance(current_row_as_dict[key], numpy.float64):\n current_row_as_dict[key] = float(current_row_as_dict[key])\n rows.append(current_row_as_dict)\n return rows\n\n\ndef main():\n \"\"\"\n Dump stock data into scraperwiki.sqlite using pandas.io.data.\n \"\"\"\n # arbitrary start chosen\n start = datetime.datetime(2014, 3, 1)\n end = datetime.datetime.today()\n\n stock_list = ['TWTR', 'FB']\n rows = []\n for stock in stock_list:\n rows.extend(parse_finance_frame(stock, start, end))\n scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date'])\n\nif __name__ == '__main__':\n main()\n\n## Instruction:\nUse pandas native saving by forcing date to not be index, and be string\n## Code After:\nimport datetime\nimport sqlite3\nimport pandas.io.data as web\nimport pandas.io.sql as sql\n\n\ndef get_stock(stock, start, end):\n \"\"\"\n Return data frame of Yahoo Finance data for stock.\n\n Takes start and end datetimes.\n \"\"\"\n return web.DataReader(stock, 'yahoo', start, end)\n\ndef scrape_stock(stock, start, end):\n sqlite_db.execute(\"drop table if exists 
{};\".format(stock))\n frame = (get_stock(stock, start, end))\n # make Date not an index so it appears in table\n frame = frame.reset_index()\n # force Date datetime to string\n frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())\n sql.write_frame(frame, stock, sqlite_db)\n\ndef main():\n global sqlite_db\n sqlite_db = sqlite3.connect(\"scraperwiki.sqlite\")\n start = datetime.datetime(2014, 3, 1)\n end = datetime.datetime.today()\n for ticker in ['TWTR', 'FB']:\n scrape_stock(ticker, start, end)\n \nif __name__ == '__main__':\n main()\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport datetime\nimport sqlite3\nimport pandas.io.data as web\nimport pandas.io.sql as sql\n\n\n\n ... \n\n\n\ndef get_stock(stock, start, end):\n \"\"\"\n Return data frame of Yahoo Finance data for stock.\n\n Takes start and end datetimes.\n \"\"\"\n return web.DataReader(stock, 'yahoo', start, end)\n\ndef scrape_stock(stock, start, end):\n sqlite_db.execute(\"drop table if exists {};\".format(stock))\n frame = (get_stock(stock, start, end))\n # make Date not an index so it appears in table\n frame = frame.reset_index()\n # force Date datetime to string\n frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat())\n sql.write_frame(frame, stock, sqlite_db)\n\n\n\n ... \n\n\ndef main():\n global sqlite_db\n sqlite_db = sqlite3.connect(\"scraperwiki.sqlite\")\n start = datetime.datetime(2014, 3, 1)\n\n\n ... \n\n\n end = datetime.datetime.today()\n for ticker in ['TWTR', 'FB']:\n scrape_stock(ticker, start, end)\n \nif __name__ == '__main__':\n\n\n ... 
"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":192,"numItemsPerPage":100,"numTotalItems":21467,"offset":19200,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1ODM3ODQwMSwic3ViIjoiL2RhdGFzZXRzL2tzZW5pYXN5Y2gvRWRpdFBhY2tGVC1hcHBseS1mdXp6eS1kaWZmcy1oZXVyaXN0aWNzX2NvbnRleHQtMSIsImV4cCI6MTc1ODM4MjAwMSwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.Ybct9PnjXvhEENHN27TrpVVdNck7YLfQ0cn18_WTHAO3q7ZOPRPgNL4_SvckobXDIzYC48esHpuwxARZcLOjDA","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
commit
stringlengths
40
40
old_file
stringlengths
4
118
new_file
stringlengths
4
118
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
3.18k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
5
43k
ndiff
stringlengths
52
3.32k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
fuzzy_diff
stringlengths
16
3.18k
535ac4c6eae416461e11f33c1a1ef67e92c73914
tests/test_exception_wrapping.py
tests/test_exception_wrapping.py
import safe def test_simple_exception(): class MockReponse(object): def json(self): return {'status': False, 'method': 'synchronize', 'module': 'cluster', 'error': {'message': 'Example error'}} exception = safe.library.raise_from_json(MockReponse()) assert str(exception) == 'Example error'
import safe class MockResponse(object): def __init__(self, data): self.data = data def json(self): return self.data def test_basic_exception(): error_message = 'Example error' response = MockResponse({ 'status': False, 'method': 'synchronize', 'module': 'cluster', 'error': {'message': error_message} }) exception = safe.library.raise_from_json(response) assert str(exception) == error_message def test_commit_failed_exception(): error_message = 'Default ipv4 gateway is not on eth0 subnet' response = MockResponse({ 'status': False, 'type': 'configuration', 'method': 'smartapply', 'module': 'nsc', 'error': { 'message': 'Apply configuration failed.', 'reason': [{ 'url': '/SAFe/sng_network_config/modify/network', 'obj_type': 'configuration', 'type': 'ERROR', 'description': error_message, 'module': 'network' }] } }) exception = safe.library.raise_from_json(response) assert isinstance(exception, safe.CommitFailed) assert str(exception) == 'Apply changes failed: ' + error_message assert len(exception.reasons) == 1 reason = exception.reasons[0] assert reason.obj == 'configuration' assert reason.module == 'network' assert reason.description == error_message
Add a commit failed test
Add a commit failed test
Python
mpl-2.0
sangoma/safepy2,leonardolang/safepy2
import safe - def test_simple_exception(): - class MockReponse(object): + class MockResponse(object): + def __init__(self, data): + self.data = data - def json(self): - return {'status': False, - 'method': 'synchronize', - 'module': 'cluster', - 'error': {'message': 'Example error'}} - exception = safe.library.raise_from_json(MockReponse()) - assert str(exception) == 'Example error' + def json(self): + return self.data + + def test_basic_exception(): + error_message = 'Example error' + response = MockResponse({ + 'status': False, + 'method': 'synchronize', + 'module': 'cluster', + 'error': {'message': error_message} + }) + + exception = safe.library.raise_from_json(response) + assert str(exception) == error_message + + + def test_commit_failed_exception(): + error_message = 'Default ipv4 gateway is not on eth0 subnet' + response = MockResponse({ + 'status': False, + 'type': 'configuration', + 'method': 'smartapply', + 'module': 'nsc', + 'error': { + 'message': 'Apply configuration failed.', + 'reason': [{ + 'url': '/SAFe/sng_network_config/modify/network', + 'obj_type': 'configuration', + 'type': 'ERROR', + 'description': error_message, + 'module': 'network' + }] + } + }) + + exception = safe.library.raise_from_json(response) + assert isinstance(exception, safe.CommitFailed) + assert str(exception) == 'Apply changes failed: ' + error_message + assert len(exception.reasons) == 1 + + reason = exception.reasons[0] + assert reason.obj == 'configuration' + assert reason.module == 'network' + assert reason.description == error_message +
Add a commit failed test
## Code Before: import safe def test_simple_exception(): class MockReponse(object): def json(self): return {'status': False, 'method': 'synchronize', 'module': 'cluster', 'error': {'message': 'Example error'}} exception = safe.library.raise_from_json(MockReponse()) assert str(exception) == 'Example error' ## Instruction: Add a commit failed test ## Code After: import safe class MockResponse(object): def __init__(self, data): self.data = data def json(self): return self.data def test_basic_exception(): error_message = 'Example error' response = MockResponse({ 'status': False, 'method': 'synchronize', 'module': 'cluster', 'error': {'message': error_message} }) exception = safe.library.raise_from_json(response) assert str(exception) == error_message def test_commit_failed_exception(): error_message = 'Default ipv4 gateway is not on eth0 subnet' response = MockResponse({ 'status': False, 'type': 'configuration', 'method': 'smartapply', 'module': 'nsc', 'error': { 'message': 'Apply configuration failed.', 'reason': [{ 'url': '/SAFe/sng_network_config/modify/network', 'obj_type': 'configuration', 'type': 'ERROR', 'description': error_message, 'module': 'network' }] } }) exception = safe.library.raise_from_json(response) assert isinstance(exception, safe.CommitFailed) assert str(exception) == 'Apply changes failed: ' + error_message assert len(exception.reasons) == 1 reason = exception.reasons[0] assert reason.obj == 'configuration' assert reason.module == 'network' assert reason.description == error_message
// ... existing code ... class MockResponse(object): def __init__(self, data): self.data = data def json(self): return self.data def test_basic_exception(): error_message = 'Example error' response = MockResponse({ 'status': False, 'method': 'synchronize', 'module': 'cluster', 'error': {'message': error_message} }) exception = safe.library.raise_from_json(response) assert str(exception) == error_message def test_commit_failed_exception(): error_message = 'Default ipv4 gateway is not on eth0 subnet' response = MockResponse({ 'status': False, 'type': 'configuration', 'method': 'smartapply', 'module': 'nsc', 'error': { 'message': 'Apply configuration failed.', 'reason': [{ 'url': '/SAFe/sng_network_config/modify/network', 'obj_type': 'configuration', 'type': 'ERROR', 'description': error_message, 'module': 'network' }] } }) exception = safe.library.raise_from_json(response) assert isinstance(exception, safe.CommitFailed) assert str(exception) == 'Apply changes failed: ' + error_message assert len(exception.reasons) == 1 reason = exception.reasons[0] assert reason.obj == 'configuration' assert reason.module == 'network' assert reason.description == error_message // ... rest of the code ...
099ff76e6b7ea10535fd85de1709a53baa9c9252
examples/install_german_voices.py
examples/install_german_voices.py
import subprocess import os pkgs = [ '"VOICEID:com.apple.speech.synthesis.voice.anna.premium_2" IN tags', '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium_2" IN tags', '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium_2" IN tags' ] for pkg in pkgs: subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
import subprocess import os import platform num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion if num <= 0: raise Exception("Voices are not available in OS X below 10.7") if num == 1: num = '' else: num = '_%d' % num pkgs = [ '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num ] for pkg in pkgs: subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
Make voices example work on 10.7-10.9
Make voices example work on 10.7-10.9
Python
mit
mkuron/PredicateInstaller
import subprocess import os + import platform + + num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion + if num <= 0: + raise Exception("Voices are not available in OS X below 10.7") + if num == 1: + num = '' + else: + num = '_%d' % num pkgs = [ - '"VOICEID:com.apple.speech.synthesis.voice.anna.premium_2" IN tags', + '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num, - '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium_2" IN tags', + '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num, - '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium_2" IN tags' + '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num ] for pkg in pkgs: subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
Make voices example work on 10.7-10.9
## Code Before: import subprocess import os pkgs = [ '"VOICEID:com.apple.speech.synthesis.voice.anna.premium_2" IN tags', '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium_2" IN tags', '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium_2" IN tags' ] for pkg in pkgs: subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg]) ## Instruction: Make voices example work on 10.7-10.9 ## Code After: import subprocess import os import platform num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion if num <= 0: raise Exception("Voices are not available in OS X below 10.7") if num == 1: num = '' else: num = '_%d' % num pkgs = [ '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num ] for pkg in pkgs: subprocess.call(['/usr/bin/python', os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'predicate_installer.py'), pkg])
... import os import platform num = int(platform.release().split('.')[0])-10 # 13=>3: Mavericks, 12=>2: Mountain Lion, 11=>1: Lion if num <= 0: raise Exception("Voices are not available in OS X below 10.7") if num == 1: num = '' else: num = '_%d' % num ... pkgs = [ '"VOICEID:com.apple.speech.synthesis.voice.anna.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.steffi.premium%s" IN tags' % num, '"VOICEID:com.apple.speech.synthesis.voice.yannick.premium%s" IN tags' % num ] ...
a08483b5fc55556b46c08e988ac297b1dffaed48
app/utils/utilities.py
app/utils/utilities.py
from re import search from flask import g from flask_restplus import abort from flask_httpauth import HTTPBasicAuth from app.models.user import User from instance.config import Config auth = HTTPBasicAuth() def validate_email(email): ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" return True if search(email_re, email) else False @auth.verify_token def verify_token(token=None): ''' Method to verify token ''' token = request.headers.get('x-access-token') user_id = User.verify_authentication_token(token) if user_id: g.current_user = User.query.filter_by(id=user.id).first() return True return False
from re import search from flask import g, request from flask_httpauth import HTTPTokenAuth from app.models.user import User auth = HTTPTokenAuth(scheme='Token') def validate_email(email): ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" return True if search(email_re, email) else False @auth.verify_token def verify_token(token=None): ''' Method to verify token ''' token = request.headers.get('x-access-token') user_id = User.verify_authentication_token(token) if user_id: g.current_user = User.query.filter_by(id=user_id).first() return True return False
Implement HTTPTokenAuth Store user data in global
Implement HTTPTokenAuth Store user data in global
Python
mit
Elbertbiggs360/buckelist-api
from re import search - from flask import g + from flask import g, request - from flask_restplus import abort - from flask_httpauth import HTTPBasicAuth + from flask_httpauth import HTTPTokenAuth from app.models.user import User - from instance.config import Config - auth = HTTPBasicAuth() + auth = HTTPTokenAuth(scheme='Token') def validate_email(email): - ''' Method to check that a valid email is provided ''' + ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" return True if search(email_re, email) else False @auth.verify_token def verify_token(token=None): ''' Method to verify token ''' token = request.headers.get('x-access-token') user_id = User.verify_authentication_token(token) if user_id: - g.current_user = User.query.filter_by(id=user.id).first() + g.current_user = User.query.filter_by(id=user_id).first() return True return False
Implement HTTPTokenAuth Store user data in global
## Code Before: from re import search from flask import g from flask_restplus import abort from flask_httpauth import HTTPBasicAuth from app.models.user import User from instance.config import Config auth = HTTPBasicAuth() def validate_email(email): ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" return True if search(email_re, email) else False @auth.verify_token def verify_token(token=None): ''' Method to verify token ''' token = request.headers.get('x-access-token') user_id = User.verify_authentication_token(token) if user_id: g.current_user = User.query.filter_by(id=user.id).first() return True return False ## Instruction: Implement HTTPTokenAuth Store user data in global ## Code After: from re import search from flask import g, request from flask_httpauth import HTTPTokenAuth from app.models.user import User auth = HTTPTokenAuth(scheme='Token') def validate_email(email): ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" return True if search(email_re, email) else False @auth.verify_token def verify_token(token=None): ''' Method to verify token ''' token = request.headers.get('x-access-token') user_id = User.verify_authentication_token(token) if user_id: g.current_user = User.query.filter_by(id=user_id).first() return True return False
# ... existing code ... from re import search from flask import g, request from flask_httpauth import HTTPTokenAuth from app.models.user import User auth = HTTPTokenAuth(scheme='Token') # ... modified code ... def validate_email(email): ''' Method to check that a valid email is provided ''' email_re = r"(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)" ... if user_id: g.current_user = User.query.filter_by(id=user_id).first() return True # ... rest of the code ...
7aaef53e5547abfca8eb64ceb4ac477a14b79536
tensorflow_datasets/core/visualization/__init__.py
tensorflow_datasets/core/visualization/__init__.py
"""Visualizer utils.""" from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer from tensorflow_datasets.core.visualization.show_examples import show_examples from tensorflow_datasets.core.visualization.show_examples import show_statistics from tensorflow_datasets.core.visualization.visualizer import Visualizer __all__ = [ "ImageGridVisualizer", "show_examples", "Visualizer", ]
"""Visualizer utils.""" from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer from tensorflow_datasets.core.visualization.show_examples import show_examples from tensorflow_datasets.core.visualization.show_examples import show_statistics from tensorflow_datasets.core.visualization.visualizer import Visualizer __all__ = [ "ImageGridVisualizer", "show_examples", "show_statistics", "Visualizer", ]
Add show_statistics to public API
Add show_statistics to public API PiperOrigin-RevId: 322842576
Python
apache-2.0
tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets
"""Visualizer utils.""" from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer from tensorflow_datasets.core.visualization.show_examples import show_examples from tensorflow_datasets.core.visualization.show_examples import show_statistics from tensorflow_datasets.core.visualization.visualizer import Visualizer __all__ = [ "ImageGridVisualizer", "show_examples", + "show_statistics", "Visualizer", ]
Add show_statistics to public API
## Code Before: """Visualizer utils.""" from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer from tensorflow_datasets.core.visualization.show_examples import show_examples from tensorflow_datasets.core.visualization.show_examples import show_statistics from tensorflow_datasets.core.visualization.visualizer import Visualizer __all__ = [ "ImageGridVisualizer", "show_examples", "Visualizer", ] ## Instruction: Add show_statistics to public API ## Code After: """Visualizer utils.""" from tensorflow_datasets.core.visualization.image_visualizer import ImageGridVisualizer from tensorflow_datasets.core.visualization.show_examples import show_examples from tensorflow_datasets.core.visualization.show_examples import show_statistics from tensorflow_datasets.core.visualization.visualizer import Visualizer __all__ = [ "ImageGridVisualizer", "show_examples", "show_statistics", "Visualizer", ]
// ... existing code ... "show_examples", "show_statistics", "Visualizer", // ... rest of the code ...
5b877d2c42a44fb4ebd1c72f89a595ac5c095e07
wsgi/bufsm/mainapp/urls.py
wsgi/bufsm/mainapp/urls.py
from django.conf.urls import url from . import views urlpatterns = [ url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), url(r'^test$', views.testLinha), ]
from django.conf.urls import url from . import views urlpatterns = [ url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), ]
Test in the original URL
Test in the original URL
Python
mit
bufsm/bufsm,bufsm/bufsm,bufsm/bufsm,bufsm/bufsm,bufsm/bufsm
from django.conf.urls import url from . import views urlpatterns = [ - url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha), + url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), - url(r'^test$', views.testLinha), ]
Test in the original URL
## Code Before: from django.conf.urls import url from . import views urlpatterns = [ url(r'^linha/(?P<idLinha>[0-9]+)$', views.getLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), url(r'^test$', views.testLinha), ] ## Instruction: Test in the original URL ## Code After: from django.conf.urls import url from . import views urlpatterns = [ url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), ]
// ... existing code ... urlpatterns = [ url(r'^linha/(?P<idLinha>[0-9]+)$', views.testLinha), url(r'^linha/(?P<idLinha>[0-9]+)/(?P<token>.+)/(?P<lat>.+)/(?P<lng>.+)$', views.writeLinha), ] // ... rest of the code ...
4de72b4bd349ebf16c0046c4ed9034914c03ffb5
cea/interfaces/dashboard/api/utils.py
cea/interfaces/dashboard/api/utils.py
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
Fix `weather_helper` bug when creating new scenario
Fix `weather_helper` bug when creating new scenario
Python
mit
architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst
from flask import current_app import cea.config import cea.inputlocator - def deconstruct_parameters(p): + def deconstruct_parameters(p: cea.config.Parameter): - params = {'name': p.name, 'type': p.typename, + params = {'name': p.name, 'type': p.typename, 'help': p.help} - 'value': p.get(), 'help': p.help} + try: + params["value"] = p.get() + except cea.ConfigError as e: + print(e) + params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
Fix `weather_helper` bug when creating new scenario
## Code Before: from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p): params = {'name': p.name, 'type': p.typename, 'value': p.get(), 'help': p.help} if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params ## Instruction: Fix `weather_helper` bug when creating new scenario ## Code After: from flask import current_app import cea.config import cea.inputlocator def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): params['choices'] = p._choices if p.typename == 'WeatherPathParameter': config = current_app.cea_config locator = cea.inputlocator.InputLocator(config.scenario) params['choices'] = {wn: locator.get_weather( wn) for wn in locator.get_weather_names()} elif p.typename == 'DatabasePathParameter': params['choices'] = p._choices return params
... def deconstruct_parameters(p: cea.config.Parameter): params = {'name': p.name, 'type': p.typename, 'help': p.help} try: params["value"] = p.get() except cea.ConfigError as e: print(e) params["value"] = "" if isinstance(p, cea.config.ChoiceParameter): ...
fa9f4ca0bae63b17937c676800fcf80889c70030
cura/CuraSplashScreen.py
cura/CuraSplashScreen.py
from PyQt5.QtCore import Qt from PyQt5.QtGui import QPixmap, QColor, QFont from PyQt5.QtWidgets import QSplashScreen from UM.Resources import Resources from UM.Application import Application class CuraSplashScreen(QSplashScreen): def __init__(self): super().__init__() self.setPixmap(QPixmap(Resources.getPath(Resources.Images, "cura.png"))) def drawContents(self, painter): painter.save() painter.setPen(QColor(0, 0, 0, 255)) version = Application.getInstance().getVersion().split("-") painter.setFont(QFont("Proxima Nova Rg", 20)) painter.drawText(0, 0, 203, 230, Qt.AlignRight | Qt.AlignBottom, version[0]) if len(version) > 1: painter.setFont(QFont("Proxima Nova Rg", 12)) painter.drawText(0, 0, 203, 255, Qt.AlignRight | Qt.AlignBottom, version[1]) painter.restore() super().drawContents(painter)
from PyQt5.QtCore import Qt, QCoreApplication from PyQt5.QtGui import QPixmap, QColor, QFont, QFontMetrics from PyQt5.QtWidgets import QSplashScreen from UM.Resources import Resources from UM.Application import Application class CuraSplashScreen(QSplashScreen): def __init__(self): super().__init__() self._scale = round(QFontMetrics(QCoreApplication.instance().font()).ascent() / 12) splash_image = QPixmap(Resources.getPath(Resources.Images, "cura.png")) self.setPixmap(splash_image.scaled(splash_image.size() * self._scale)) def drawContents(self, painter): painter.save() painter.setPen(QColor(0, 0, 0, 255)) version = Application.getInstance().getVersion().split("-") painter.setFont(QFont("Proxima Nova Rg", 20 )) painter.drawText(0, 0, 330 * self._scale, 230 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[0]) if len(version) > 1: painter.setFont(QFont("Proxima Nova Rg", 12 )) painter.drawText(0, 0, 330 * self._scale, 255 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[1]) painter.restore() super().drawContents(painter)
Fix splashscreen size on HiDPI (windows) screens
Fix splashscreen size on HiDPI (windows) screens
Python
agpl-3.0
fieldOfView/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,Curahelper/Cura,totalretribution/Cura,Curahelper/Cura,totalretribution/Cura,senttech/Cura,fieldOfView/Cura,hmflash/Cura,senttech/Cura,hmflash/Cura
- from PyQt5.QtCore import Qt + from PyQt5.QtCore import Qt, QCoreApplication - from PyQt5.QtGui import QPixmap, QColor, QFont + from PyQt5.QtGui import QPixmap, QColor, QFont, QFontMetrics from PyQt5.QtWidgets import QSplashScreen from UM.Resources import Resources from UM.Application import Application class CuraSplashScreen(QSplashScreen): def __init__(self): super().__init__() + self._scale = round(QFontMetrics(QCoreApplication.instance().font()).ascent() / 12) + - self.setPixmap(QPixmap(Resources.getPath(Resources.Images, "cura.png"))) + splash_image = QPixmap(Resources.getPath(Resources.Images, "cura.png")) + self.setPixmap(splash_image.scaled(splash_image.size() * self._scale)) def drawContents(self, painter): painter.save() painter.setPen(QColor(0, 0, 0, 255)) version = Application.getInstance().getVersion().split("-") - painter.setFont(QFont("Proxima Nova Rg", 20)) + painter.setFont(QFont("Proxima Nova Rg", 20 )) - painter.drawText(0, 0, 203, 230, Qt.AlignRight | Qt.AlignBottom, version[0]) + painter.drawText(0, 0, 330 * self._scale, 230 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[0]) if len(version) > 1: - painter.setFont(QFont("Proxima Nova Rg", 12)) + painter.setFont(QFont("Proxima Nova Rg", 12 )) - painter.drawText(0, 0, 203, 255, Qt.AlignRight | Qt.AlignBottom, version[1]) + painter.drawText(0, 0, 330 * self._scale, 255 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[1]) painter.restore() super().drawContents(painter)
Fix splashscreen size on HiDPI (windows) screens
## Code Before: from PyQt5.QtCore import Qt from PyQt5.QtGui import QPixmap, QColor, QFont from PyQt5.QtWidgets import QSplashScreen from UM.Resources import Resources from UM.Application import Application class CuraSplashScreen(QSplashScreen): def __init__(self): super().__init__() self.setPixmap(QPixmap(Resources.getPath(Resources.Images, "cura.png"))) def drawContents(self, painter): painter.save() painter.setPen(QColor(0, 0, 0, 255)) version = Application.getInstance().getVersion().split("-") painter.setFont(QFont("Proxima Nova Rg", 20)) painter.drawText(0, 0, 203, 230, Qt.AlignRight | Qt.AlignBottom, version[0]) if len(version) > 1: painter.setFont(QFont("Proxima Nova Rg", 12)) painter.drawText(0, 0, 203, 255, Qt.AlignRight | Qt.AlignBottom, version[1]) painter.restore() super().drawContents(painter) ## Instruction: Fix splashscreen size on HiDPI (windows) screens ## Code After: from PyQt5.QtCore import Qt, QCoreApplication from PyQt5.QtGui import QPixmap, QColor, QFont, QFontMetrics from PyQt5.QtWidgets import QSplashScreen from UM.Resources import Resources from UM.Application import Application class CuraSplashScreen(QSplashScreen): def __init__(self): super().__init__() self._scale = round(QFontMetrics(QCoreApplication.instance().font()).ascent() / 12) splash_image = QPixmap(Resources.getPath(Resources.Images, "cura.png")) self.setPixmap(splash_image.scaled(splash_image.size() * self._scale)) def drawContents(self, painter): painter.save() painter.setPen(QColor(0, 0, 0, 255)) version = Application.getInstance().getVersion().split("-") painter.setFont(QFont("Proxima Nova Rg", 20 )) painter.drawText(0, 0, 330 * self._scale, 230 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[0]) if len(version) > 1: painter.setFont(QFont("Proxima Nova Rg", 12 )) painter.drawText(0, 0, 330 * self._scale, 255 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[1]) painter.restore() super().drawContents(painter)
// ... existing code ... from PyQt5.QtCore import Qt, QCoreApplication from PyQt5.QtGui import QPixmap, QColor, QFont, QFontMetrics from PyQt5.QtWidgets import QSplashScreen // ... modified code ... super().__init__() self._scale = round(QFontMetrics(QCoreApplication.instance().font()).ascent() / 12) splash_image = QPixmap(Resources.getPath(Resources.Images, "cura.png")) self.setPixmap(splash_image.scaled(splash_image.size() * self._scale)) ... painter.setFont(QFont("Proxima Nova Rg", 20 )) painter.drawText(0, 0, 330 * self._scale, 230 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[0]) if len(version) > 1: painter.setFont(QFont("Proxima Nova Rg", 12 )) painter.drawText(0, 0, 330 * self._scale, 255 * self._scale, Qt.AlignHCenter | Qt.AlignBottom, version[1]) // ... rest of the code ...
38c2f86e8784530efc0234851d3bb9ebbfef58f5
froide/account/api_views.py
froide/account/api_views.py
from rest_framework import serializers, views, permissions, response from oauth2_provider.contrib.rest_framework import TokenHasScope from .models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id',) def to_representation(self, obj): default = super(UserSerializer, self).to_representation(obj) if obj.is_superuser: default['is_superuser'] = True if obj.is_staff: default['is_staff'] = True return default class UserDetailSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = UserSerializer.Meta.fields + ('first_name', 'last_name',) class UserEmailSerializer(UserSerializer): class Meta: model = User fields = UserDetailSerializer.Meta.fields + ('email',) class ProfileView(views.APIView): permission_classes = [permissions.IsAuthenticated, TokenHasScope] required_scopes = ['read:user'] def get(self, request, format=None): token = request.auth user = request.user if token.is_valid(['read:email']): serializer = UserEmailSerializer(user) elif token.is_valid(['read:profile']): serializer = UserDetailSerializer(user) else: serializer = UserSerializer(user) return response.Response(serializer.data)
from rest_framework import serializers, views, permissions, response from .models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id', 'private') def to_representation(self, obj): default = super(UserSerializer, self).to_representation(obj) if obj.is_superuser: default['is_superuser'] = True if obj.is_staff: default['is_staff'] = True return default class UserDetailSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = UserSerializer.Meta.fields + ('first_name', 'last_name',) class UserEmailSerializer(UserSerializer): class Meta: model = User fields = UserDetailSerializer.Meta.fields + ('email',) class UserFullSerializer(UserSerializer): class Meta: model = User fields = UserEmailSerializer.Meta.fields + ('address',) class ProfileView(views.APIView): permission_classes = [permissions.IsAuthenticated] def has_permission(self, request, view): token = request.auth if token and not token.is_valid(['read:user']): return False return super(ProfileView, self).has_permission( request, view ) def get(self, request, format=None): token = request.auth user = request.user if token: if token.is_valid(['read:email']): serializer = UserEmailSerializer(user) elif token.is_valid(['read:profile']): serializer = UserDetailSerializer(user) else: serializer = UserSerializer(user) else: # if token is None, user is currently logged in user serializer = UserFullSerializer(user) return response.Response(serializer.data)
Make logged in user endpoint available w/o token
Make logged in user endpoint available w/o token
Python
mit
stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide
from rest_framework import serializers, views, permissions, response - - from oauth2_provider.contrib.rest_framework import TokenHasScope from .models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User - fields = ('id',) + fields = ('id', 'private') def to_representation(self, obj): default = super(UserSerializer, self).to_representation(obj) if obj.is_superuser: default['is_superuser'] = True if obj.is_staff: default['is_staff'] = True return default class UserDetailSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = UserSerializer.Meta.fields + ('first_name', 'last_name',) class UserEmailSerializer(UserSerializer): class Meta: model = User fields = UserDetailSerializer.Meta.fields + ('email',) + class UserFullSerializer(UserSerializer): + class Meta: + model = User + fields = UserEmailSerializer.Meta.fields + ('address',) + + class ProfileView(views.APIView): - permission_classes = [permissions.IsAuthenticated, TokenHasScope] + permission_classes = [permissions.IsAuthenticated] - required_scopes = ['read:user'] + + def has_permission(self, request, view): + token = request.auth + if token and not token.is_valid(['read:user']): + return False + + return super(ProfileView, self).has_permission( + request, view + ) def get(self, request, format=None): token = request.auth user = request.user + if token: - if token.is_valid(['read:email']): + if token.is_valid(['read:email']): - serializer = UserEmailSerializer(user) + serializer = UserEmailSerializer(user) - elif token.is_valid(['read:profile']): + elif token.is_valid(['read:profile']): - serializer = UserDetailSerializer(user) + serializer = UserDetailSerializer(user) + else: + serializer = UserSerializer(user) else: + # if token is None, user is currently logged in user - serializer = UserSerializer(user) + serializer = UserFullSerializer(user) return response.Response(serializer.data)
Make logged in user endpoint available w/o token
## Code Before: from rest_framework import serializers, views, permissions, response from oauth2_provider.contrib.rest_framework import TokenHasScope from .models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id',) def to_representation(self, obj): default = super(UserSerializer, self).to_representation(obj) if obj.is_superuser: default['is_superuser'] = True if obj.is_staff: default['is_staff'] = True return default class UserDetailSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = UserSerializer.Meta.fields + ('first_name', 'last_name',) class UserEmailSerializer(UserSerializer): class Meta: model = User fields = UserDetailSerializer.Meta.fields + ('email',) class ProfileView(views.APIView): permission_classes = [permissions.IsAuthenticated, TokenHasScope] required_scopes = ['read:user'] def get(self, request, format=None): token = request.auth user = request.user if token.is_valid(['read:email']): serializer = UserEmailSerializer(user) elif token.is_valid(['read:profile']): serializer = UserDetailSerializer(user) else: serializer = UserSerializer(user) return response.Response(serializer.data) ## Instruction: Make logged in user endpoint available w/o token ## Code After: from rest_framework import serializers, views, permissions, response from .models import User class UserSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = ('id', 'private') def to_representation(self, obj): default = super(UserSerializer, self).to_representation(obj) if obj.is_superuser: default['is_superuser'] = True if obj.is_staff: default['is_staff'] = True return default class UserDetailSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = User fields = UserSerializer.Meta.fields + ('first_name', 'last_name',) class UserEmailSerializer(UserSerializer): class Meta: model = User fields = UserDetailSerializer.Meta.fields + ('email',) class 
UserFullSerializer(UserSerializer): class Meta: model = User fields = UserEmailSerializer.Meta.fields + ('address',) class ProfileView(views.APIView): permission_classes = [permissions.IsAuthenticated] def has_permission(self, request, view): token = request.auth if token and not token.is_valid(['read:user']): return False return super(ProfileView, self).has_permission( request, view ) def get(self, request, format=None): token = request.auth user = request.user if token: if token.is_valid(['read:email']): serializer = UserEmailSerializer(user) elif token.is_valid(['read:profile']): serializer = UserDetailSerializer(user) else: serializer = UserSerializer(user) else: # if token is None, user is currently logged in user serializer = UserFullSerializer(user) return response.Response(serializer.data)
# ... existing code ... from rest_framework import serializers, views, permissions, response # ... modified code ... model = User fields = ('id', 'private') ... class UserFullSerializer(UserSerializer): class Meta: model = User fields = UserEmailSerializer.Meta.fields + ('address',) class ProfileView(views.APIView): permission_classes = [permissions.IsAuthenticated] def has_permission(self, request, view): token = request.auth if token and not token.is_valid(['read:user']): return False return super(ProfileView, self).has_permission( request, view ) ... user = request.user if token: if token.is_valid(['read:email']): serializer = UserEmailSerializer(user) elif token.is_valid(['read:profile']): serializer = UserDetailSerializer(user) else: serializer = UserSerializer(user) else: # if token is None, user is currently logged in user serializer = UserFullSerializer(user) return response.Response(serializer.data) # ... rest of the code ...
0668b59d8ec73e80976928706f96922605fe4f67
tsserver/models.py
tsserver/models.py
from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ id = db.Column(db.Integer, primary_key=True) timestamp = db.Column(db.DateTime) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
Remove integer ID in Telemetry model
Remove integer ID in Telemetry model
Python
mit
m4tx/techswarm-server
from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ - id = db.Column(db.Integer, primary_key=True) + timestamp = db.Column(db.DateTime, primary_key=True) - timestamp = db.Column(db.DateTime) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
Remove integer ID in Telemetry model
## Code Before: from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ id = db.Column(db.Integer, primary_key=True) timestamp = db.Column(db.DateTime) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure} ## Instruction: Remove integer ID in Telemetry model ## Code After: from tsserver import db from tsserver.dtutils import datetime_to_str class Telemetry(db.Model): """ All the data that is going to be obtained in regular time intervals (every second or so). """ timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) pressure = db.Column(db.Float) def __init__(self, timestamp, temperature, pressure): self.timestamp = timestamp self.temperature = temperature self.pressure = pressure def as_dict(self): return {'timestamp': datetime_to_str(self.timestamp), 'temperature': self.temperature, 'pressure': self.pressure}
// ... existing code ... timestamp = db.Column(db.DateTime, primary_key=True) temperature = db.Column(db.Float) // ... rest of the code ...
15964c974220c88a1b2fbca353d4a11b180e2bd8
_launch.py
_launch.py
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) from dragonglue.command import send_command grammar = Grammar("launch") applications = { 'sublime': 'w-s', 'pycharm': 'w-d', 'chrome': 'w-f', 'logs': 'w-j', 'SQL': 'w-k', 'IPython': 'w-l', 'shell': 'w-semicolon', 'terminal': 'w-a', # 'spotify': 'spotify /home/dan/bin/spotify', } # aliases applications['charm'] = applications['pycharm'] applications['termie'] = applications['terminal'] def Command(cmd): def ex(application=''): # print 'execute', cmd + application send_command(cmd + application) return Function(ex) launch_rule = MappingRule( name="launch", mapping={ 'Do run': Key('w-x'), 'get <application>': Key('%(application)s'), # 're-browse': Key('w-F'), 'voice sync': Command('subl --command voice_sync'), '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'), }, extras=[ Choice('application', applications) ] ) grammar.add_rule(launch_rule) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) from dragonglue.command import send_command, Command grammar = Grammar("launch") applications = { 'sublime': 'w-s', 'pycharm': 'w-d', 'chrome': 'w-f', 'logs': 'w-j', 'SQL': 'w-k', 'IPython': 'w-l', 'shell': 'w-semicolon', 'terminal': 'w-a', # 'spotify': 'spotify /home/dan/bin/spotify', } # aliases applications['charm'] = applications['pycharm'] applications['termie'] = applications['terminal'] launch_rule = MappingRule( name="launch", mapping={ 'Do run': Key('w-x'), 'get <application>': Key('%(application)s'), # 're-browse': Key('w-F'), 'voice sync': Command('subl --command voice_sync'), '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'), }, extras=[ Choice('application', applications) ] ) grammar.add_rule(launch_rule) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None
Refactor Command action to dragonglue.command
Refactor Command action to dragonglue.command
Python
mit
drocco007/vox_commands
from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) - from dragonglue.command import send_command + from dragonglue.command import send_command, Command grammar = Grammar("launch") applications = { 'sublime': 'w-s', 'pycharm': 'w-d', 'chrome': 'w-f', 'logs': 'w-j', 'SQL': 'w-k', 'IPython': 'w-l', 'shell': 'w-semicolon', 'terminal': 'w-a', # 'spotify': 'spotify /home/dan/bin/spotify', } # aliases applications['charm'] = applications['pycharm'] applications['termie'] = applications['terminal'] - - - def Command(cmd): - def ex(application=''): - # print 'execute', cmd + application - send_command(cmd + application) - - return Function(ex) launch_rule = MappingRule( name="launch", mapping={ 'Do run': Key('w-x'), 'get <application>': Key('%(application)s'), # 're-browse': Key('w-F'), 'voice sync': Command('subl --command voice_sync'), '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'), }, extras=[ Choice('application', applications) ] ) grammar.add_rule(launch_rule) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None
Refactor Command action to dragonglue.command
## Code Before: from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) from dragonglue.command import send_command grammar = Grammar("launch") applications = { 'sublime': 'w-s', 'pycharm': 'w-d', 'chrome': 'w-f', 'logs': 'w-j', 'SQL': 'w-k', 'IPython': 'w-l', 'shell': 'w-semicolon', 'terminal': 'w-a', # 'spotify': 'spotify /home/dan/bin/spotify', } # aliases applications['charm'] = applications['pycharm'] applications['termie'] = applications['terminal'] def Command(cmd): def ex(application=''): # print 'execute', cmd + application send_command(cmd + application) return Function(ex) launch_rule = MappingRule( name="launch", mapping={ 'Do run': Key('w-x'), 'get <application>': Key('%(application)s'), # 're-browse': Key('w-F'), 'voice sync': Command('subl --command voice_sync'), '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'), }, extras=[ Choice('application', applications) ] ) grammar.add_rule(launch_rule) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None ## Instruction: Refactor Command action to dragonglue.command ## Code After: from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) from dragonglue.command import send_command, Command grammar = Grammar("launch") applications = { 'sublime': 'w-s', 'pycharm': 'w-d', 'chrome': 'w-f', 'logs': 'w-j', 'SQL': 'w-k', 'IPython': 'w-l', 'shell': 'w-semicolon', 'terminal': 'w-a', # 'spotify': 'spotify /home/dan/bin/spotify', } # aliases applications['charm'] = applications['pycharm'] applications['termie'] = applications['terminal'] launch_rule = MappingRule( name="launch", mapping={ 'Do run': Key('w-x'), 'get <application>': Key('%(application)s'), # 're-browse': Key('w-F'), 'voice sync': Command('subl --command voice_sync'), '(touch | refresh) multi-edit': Command('touch /home/drocco/source/voice/natlink/commands/_multiedit.py'), }, extras=[ Choice('application', applications) ] ) 
grammar.add_rule(launch_rule) grammar.load() def unload(): global grammar if grammar: grammar.unload() grammar = None
// ... existing code ... from dragonfly import (Grammar, MappingRule, Choice, Text, Key, Function) from dragonglue.command import send_command, Command // ... modified code ... applications['termie'] = applications['terminal'] // ... rest of the code ...
3eaf0ea514b0f78906af7e614079f3a90624bcc7
estimate.py
estimate.py
from sys import stdin def estimateConf(conf): """Estimate configuration from a string.""" confElements = [int(x) for x in conf.split(sep=" ")] disk = confElements[0] print(disk) procRates = confElements[1:] print(procRates) def estimateConfsFromInput(): """Parse and estimate configurations from stdin.""" for line in stdin: confs = line.splitlines() for conf in confs: estimateConf(conf) if __name__ == "__main__": estimateConfsFromInput()
from sys import stdin def calcExhaustion(disk, procRates): """Calculate how many seconds before the disk is filled. procRates lists the rates at which each process fills 1 byte of disk space.""" print(disk) print(procRates) def estimateConf(conf): """Estimate configuration from a string.""" confElements = [int(x) for x in conf.split(sep=" ")] disk = confElements[0] procRates = confElements[1:] eta = calcExhaustion(disk, procRates); def estimateConfsFromInput(): """Parse and estimate configurations from stdin.""" for line in stdin: confs = line.splitlines() for conf in confs: estimateConf(conf) if __name__ == "__main__": estimateConfsFromInput()
Create fn for calculating exhaustion
Create fn for calculating exhaustion
Python
mit
MattHeard/EstimateDiskExhaustion
from sys import stdin + def calcExhaustion(disk, procRates): + """Calculate how many seconds before the disk is filled. + + procRates lists the rates at which each process fills 1 byte of disk + space.""" + print(disk) + print(procRates) def estimateConf(conf): """Estimate configuration from a string.""" confElements = [int(x) for x in conf.split(sep=" ")] disk = confElements[0] - print(disk) procRates = confElements[1:] - print(procRates) + eta = calcExhaustion(disk, procRates); def estimateConfsFromInput(): """Parse and estimate configurations from stdin.""" for line in stdin: confs = line.splitlines() for conf in confs: estimateConf(conf) if __name__ == "__main__": estimateConfsFromInput()
Create fn for calculating exhaustion
## Code Before: from sys import stdin def estimateConf(conf): """Estimate configuration from a string.""" confElements = [int(x) for x in conf.split(sep=" ")] disk = confElements[0] print(disk) procRates = confElements[1:] print(procRates) def estimateConfsFromInput(): """Parse and estimate configurations from stdin.""" for line in stdin: confs = line.splitlines() for conf in confs: estimateConf(conf) if __name__ == "__main__": estimateConfsFromInput() ## Instruction: Create fn for calculating exhaustion ## Code After: from sys import stdin def calcExhaustion(disk, procRates): """Calculate how many seconds before the disk is filled. procRates lists the rates at which each process fills 1 byte of disk space.""" print(disk) print(procRates) def estimateConf(conf): """Estimate configuration from a string.""" confElements = [int(x) for x in conf.split(sep=" ")] disk = confElements[0] procRates = confElements[1:] eta = calcExhaustion(disk, procRates); def estimateConfsFromInput(): """Parse and estimate configurations from stdin.""" for line in stdin: confs = line.splitlines() for conf in confs: estimateConf(conf) if __name__ == "__main__": estimateConfsFromInput()
// ... existing code ... def calcExhaustion(disk, procRates): """Calculate how many seconds before the disk is filled. procRates lists the rates at which each process fills 1 byte of disk space.""" print(disk) print(procRates) // ... modified code ... disk = confElements[0] procRates = confElements[1:] eta = calcExhaustion(disk, procRates); // ... rest of the code ...
90a94b1d511aa17f167d783992fe0f874ad529c1
examples/python_interop/python_interop.py
examples/python_interop/python_interop.py
from __future__ import print_function import legion @legion.task def f(ctx): print("inside task f") @legion.task def main_task(ctx): print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id) f(ctx)
from __future__ import print_function import legion @legion.task def f(ctx, *args): print("inside task f%s" % (args,)) @legion.task def main_task(ctx): print("inside main()") f(ctx, 1, "asdf", True)
Test Python support for arguments.
examples: Test Python support for arguments.
Python
apache-2.0
StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion,StanfordLegion/legion
from __future__ import print_function import legion @legion.task - def f(ctx): + def f(ctx, *args): - print("inside task f") + print("inside task f%s" % (args,)) @legion.task def main_task(ctx): - print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id) - f(ctx) + print("inside main()") + f(ctx, 1, "asdf", True)
Test Python support for arguments.
## Code Before: from __future__ import print_function import legion @legion.task def f(ctx): print("inside task f") @legion.task def main_task(ctx): print("%x" % legion.c.legion_runtime_get_executing_processor(ctx.runtime, ctx.context).id) f(ctx) ## Instruction: Test Python support for arguments. ## Code After: from __future__ import print_function import legion @legion.task def f(ctx, *args): print("inside task f%s" % (args,)) @legion.task def main_task(ctx): print("inside main()") f(ctx, 1, "asdf", True)
// ... existing code ... @legion.task def f(ctx, *args): print("inside task f%s" % (args,)) // ... modified code ... def main_task(ctx): print("inside main()") f(ctx, 1, "asdf", True) // ... rest of the code ...
fdc0bb75271b90a31072f79b95283e1156d50181
waffle/decorators.py
waffle/decorators.py
from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
Make the decorator actually work again.
Make the decorator actually work again.
Python
bsd-3-clause
isotoma/django-waffle,TwigWorld/django-waffle,rlr/django-waffle,webus/django-waffle,groovecoder/django-waffle,JeLoueMonCampingCar/django-waffle,crccheck/django-waffle,safarijv/django-waffle,paulcwatts/django-waffle,JeLoueMonCampingCar/django-waffle,11craft/django-waffle,festicket/django-waffle,styleseat/django-waffle,mark-adams/django-waffle,rodgomes/django-waffle,crccheck/django-waffle,mark-adams/django-waffle,webus/django-waffle,groovecoder/django-waffle,mwaaas/django-waffle-session,hwkns/django-waffle,VladimirFilonov/django-waffle,ekohl/django-waffle,festicket/django-waffle,paulcwatts/django-waffle,TwigWorld/django-waffle,mwaaas/django-waffle-session,VladimirFilonov/django-waffle,rlr/django-waffle,willkg/django-waffle,engagespark/django-waffle,hwkns/django-waffle,crccheck/django-waffle,JeLoueMonCampingCar/django-waffle,TwigWorld/django-waffle,hwkns/django-waffle,safarijv/django-waffle,webus/django-waffle,rodgomes/django-waffle,engagespark/django-waffle,safarijv/django-waffle,festicket/django-waffle,groovecoder/django-waffle,styleseat/django-waffle,mwaaas/django-waffle-session,mark-adams/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,webus/django-waffle,rlr/django-waffle,willkg/django-waffle,ilanbm/django-waffle,festicket/django-waffle,crccheck/django-waffle,rodgomes/django-waffle,ilanbm/django-waffle,ekohl/django-waffle,groovecoder/django-waffle,hwkns/django-waffle,isotoma/django-waffle,11craft/django-waffle,rlr/django-waffle,ilanbm/django-waffle,JeLoueMonCampingCar/django-waffle,isotoma/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,engagespark/django-waffle,engagespark/django-waffle,rsalmaso/django-waffle,styleseat/django-waffle,mark-adams/django-waffle,rsalmaso/django-waffle,isotoma/django-waffle,rsalmaso/django-waffle,rsalmaso/django-waffle,rodgomes/django-waffle,mwaaas/django-waffle-session,styleseat/django-waffle,ilanbm/django-waffle,safarijv/django-waffle
from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): - if flag_name.startswith('!'): - active = is_active(request, flag_name[1:]) - else: - active = is_active(request, flag_name) - @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): + if flag_name.startswith('!'): + active = is_active(request, flag_name[1:]) + else: + active = is_active(request, flag_name) + if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
Make the decorator actually work again.
## Code Before: from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator ## Instruction: Make the decorator actually work again. ## Code After: from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
... def decorator(view): @wraps(view, assigned=available_attrs(view)) ... def _wrapped_view(request, *args, **kwargs): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) if not active: ...
926d5333c1556850a3eda6025ac8cf471b67c0a3
condor/probes/setup.py
condor/probes/setup.py
from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/etc/sysconfig', ['config/collect_history'])], license = 'Apache 2.0' )
from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' )
Add directory for state files
Add directory for state files
Python
apache-2.0
DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs,DHTC-Tools/logstash-confs
from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), + ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], - license = 'Apache 2.0' + license='Apache 2.0' )
Add directory for state files
## Code Before: from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/etc/sysconfig', ['config/collect_history'])], license = 'Apache 2.0' ) ## Instruction: Add directory for state files ## Code After: from distutils.core import setup setup(name='htcondor-es-probes', version='0.6.3', description='HTCondor probes for Elasticsearch analytics', author='Suchandra Thapa', author_email='[email protected]', url='https://github.com/DHTC-Tools/logstash-confs/tree/master/condor', packages=['probe_libs'], scripts=['collect_history_info.py', 'get_job_status.py'], data_files=[('/etc/init.d/', ['scripts/collect_history']), ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' )
... ('/etc/cron.d/', ['config/schedd_probe']), ('/var/lib/collect_history', []), ('/etc/sysconfig', ['config/collect_history'])], license='Apache 2.0' ) ...
52e9390d88062e9442b18a7793e6696a36f5b9c3
testinfra/functional/test_tor_interfaces.py
testinfra/functional/test_tor_interfaces.py
import os import re import pytest sdvars = pytest.securedrop_test_vars @pytest.mark.xfail @pytest.mark.parametrize('site', sdvars.tor_url_files) @pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false", reason="Can only assure Tor is configured in CI atm") def test_www(Command, site): """ Ensure tor interface is reachable and returns expected content. """ # Extract Onion URL from saved onion file, fetched back from app-staging. onion_url_filepath = os.path.join( os.path.dirname(__file__), "../../install_files/ansible-base/{}".format(site['file']) ) onion_url_raw = open(onion_url_filepath, 'ro').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( onion_url) curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) site_scrape = Command.check_output(curl_tor) assert Command.check_output(curl_tor_status) == "200" assert site['check_string'] in site_scrape assert site['error_string'] not in site_scrape
import os import re import pytest sdvars = pytest.securedrop_test_vars @pytest.mark.parametrize('site', sdvars.tor_url_files) @pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false", reason="Can only assure Tor is configured in CI atm") def test_www(Command, site): """ Ensure tor interface is reachable and returns expected content. """ # Extract Onion URL from saved onion file, fetched back from app-staging. onion_url_filepath = os.path.join( os.path.dirname(__file__), "../../install_files/ansible-base/{}".format(site['file']) ) onion_url_raw = open(onion_url_filepath, 'ro').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( onion_url) curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) site_scrape = Command.check_output(curl_tor) assert Command.check_output(curl_tor_status) == "200" assert site['check_string'] in site_scrape assert site['error_string'] not in site_scrape
Remove XFAIL on functional tor test
Remove XFAIL on functional tor test
Python
agpl-3.0
conorsch/securedrop,ehartsuyker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,conorsch/securedrop,heartsucker/securedrop,garrettr/securedrop,ehartsuyker/securedrop,ehartsuyker/securedrop,conorsch/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,conorsch/securedrop,heartsucker/securedrop,ehartsuyker/securedrop,heartsucker/securedrop,garrettr/securedrop,conorsch/securedrop,garrettr/securedrop,heartsucker/securedrop
import os import re import pytest sdvars = pytest.securedrop_test_vars - @pytest.mark.xfail @pytest.mark.parametrize('site', sdvars.tor_url_files) @pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false", reason="Can only assure Tor is configured in CI atm") def test_www(Command, site): """ Ensure tor interface is reachable and returns expected content. """ # Extract Onion URL from saved onion file, fetched back from app-staging. onion_url_filepath = os.path.join( os.path.dirname(__file__), "../../install_files/ansible-base/{}".format(site['file']) ) onion_url_raw = open(onion_url_filepath, 'ro').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( onion_url) curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) site_scrape = Command.check_output(curl_tor) assert Command.check_output(curl_tor_status) == "200" assert site['check_string'] in site_scrape assert site['error_string'] not in site_scrape
Remove XFAIL on functional tor test
## Code Before: import os import re import pytest sdvars = pytest.securedrop_test_vars @pytest.mark.xfail @pytest.mark.parametrize('site', sdvars.tor_url_files) @pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false", reason="Can only assure Tor is configured in CI atm") def test_www(Command, site): """ Ensure tor interface is reachable and returns expected content. """ # Extract Onion URL from saved onion file, fetched back from app-staging. onion_url_filepath = os.path.join( os.path.dirname(__file__), "../../install_files/ansible-base/{}".format(site['file']) ) onion_url_raw = open(onion_url_filepath, 'ro').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( onion_url) curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) site_scrape = Command.check_output(curl_tor) assert Command.check_output(curl_tor_status) == "200" assert site['check_string'] in site_scrape assert site['error_string'] not in site_scrape ## Instruction: Remove XFAIL on functional tor test ## Code After: import os import re import pytest sdvars = pytest.securedrop_test_vars @pytest.mark.parametrize('site', sdvars.tor_url_files) @pytest.mark.skipif(os.environ.get('FPF_CI', 'false') == "false", reason="Can only assure Tor is configured in CI atm") def test_www(Command, site): """ Ensure tor interface is reachable and returns expected content. """ # Extract Onion URL from saved onion file, fetched back from app-staging. onion_url_filepath = os.path.join( os.path.dirname(__file__), "../../install_files/ansible-base/{}".format(site['file']) ) onion_url_raw = open(onion_url_filepath, 'ro').read() onion_url = re.search("\w+\.onion", onion_url_raw).group() # Fetch Onion URL via curl to confirm interface is rendered correctly. 
curl_tor = 'curl -s --socks5-hostname "${{TOR_PROXY}}":9050 {}'.format( onion_url) curl_tor_status = '{} -o /dev/null -w "%{{http_code}}"'.format(curl_tor) site_scrape = Command.check_output(curl_tor) assert Command.check_output(curl_tor_status) == "200" assert site['check_string'] in site_scrape assert site['error_string'] not in site_scrape
// ... existing code ... @pytest.mark.parametrize('site', sdvars.tor_url_files) // ... rest of the code ...
606cb3475e2e4220822f924d13881dfaefb51aa4
teryt_tree/rest_framework_ext/viewsets.py
teryt_tree/rest_framework_ext/viewsets.py
import django_filters from django.shortcuts import get_object_or_404 try: from django_filters import rest_framework as filters except ImportError: # Back-ward compatible for django-rest-framework<3.7 from rest_framework import filters from rest_framework import viewsets from teryt_tree.models import JednostkaAdministracyjna from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer def custom_area_filter(queryset, _, value): if not value: return queryset return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value)) class JednostkaAdministracyjnaFilter(filters.FilterSet): area = django_filters.CharFilter(action=custom_area_filter) class Meta: model = JednostkaAdministracyjna fields = ['name', 'category', 'category__level', 'area'] class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet): queryset = (JednostkaAdministracyjna.objects. select_related('category'). prefetch_related('children'). all()) serializer_class = JednostkaAdministracyjnaSerializer filter_backends = (filters.DjangoFilterBackend,) filter_class = JednostkaAdministracyjnaFilter
import django_filters from django.shortcuts import get_object_or_404 from django.utils.translation import ugettext_lazy as _ try: from django_filters import rest_framework as filters except ImportError: # Back-ward compatible for django-rest-framework<3.7 from rest_framework import filters from rest_framework import viewsets from teryt_tree.models import JednostkaAdministracyjna from teryt_tree.rest_framework_ext.serializers import \ JednostkaAdministracyjnaSerializer def custom_area_filter(queryset, _, value): if not value: return queryset return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value)) class JednostkaAdministracyjnaFilter(filters.FilterSet): area = django_filters.CharFilter( method=custom_area_filter, label=_("Area") ) class Meta: model = JednostkaAdministracyjna fields = ['name', 'category', 'category__level', 'area'] class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet): queryset = (JednostkaAdministracyjna.objects. select_related('category'). prefetch_related('children'). all()) serializer_class = JednostkaAdministracyjnaSerializer filter_backends = (filters.DjangoFilterBackend,) filter_class = JednostkaAdministracyjnaFilter
Update JednostkaAdministracyjnaFilter for new django-filters
Update JednostkaAdministracyjnaFilter for new django-filters
Python
bsd-3-clause
ad-m/django-teryt-tree
import django_filters from django.shortcuts import get_object_or_404 + from django.utils.translation import ugettext_lazy as _ + try: from django_filters import rest_framework as filters except ImportError: # Back-ward compatible for django-rest-framework<3.7 from rest_framework import filters from rest_framework import viewsets from teryt_tree.models import JednostkaAdministracyjna - from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer + from teryt_tree.rest_framework_ext.serializers import \ + JednostkaAdministracyjnaSerializer def custom_area_filter(queryset, _, value): if not value: return queryset return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value)) class JednostkaAdministracyjnaFilter(filters.FilterSet): - area = django_filters.CharFilter(action=custom_area_filter) + area = django_filters.CharFilter( + method=custom_area_filter, + label=_("Area") + ) class Meta: model = JednostkaAdministracyjna fields = ['name', 'category', 'category__level', 'area'] class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet): queryset = (JednostkaAdministracyjna.objects. select_related('category'). prefetch_related('children'). all()) serializer_class = JednostkaAdministracyjnaSerializer filter_backends = (filters.DjangoFilterBackend,) filter_class = JednostkaAdministracyjnaFilter
Update JednostkaAdministracyjnaFilter for new django-filters
## Code Before: import django_filters from django.shortcuts import get_object_or_404 try: from django_filters import rest_framework as filters except ImportError: # Back-ward compatible for django-rest-framework<3.7 from rest_framework import filters from rest_framework import viewsets from teryt_tree.models import JednostkaAdministracyjna from teryt_tree.rest_framework_ext.serializers import JednostkaAdministracyjnaSerializer def custom_area_filter(queryset, _, value): if not value: return queryset return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value)) class JednostkaAdministracyjnaFilter(filters.FilterSet): area = django_filters.CharFilter(action=custom_area_filter) class Meta: model = JednostkaAdministracyjna fields = ['name', 'category', 'category__level', 'area'] class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet): queryset = (JednostkaAdministracyjna.objects. select_related('category'). prefetch_related('children'). all()) serializer_class = JednostkaAdministracyjnaSerializer filter_backends = (filters.DjangoFilterBackend,) filter_class = JednostkaAdministracyjnaFilter ## Instruction: Update JednostkaAdministracyjnaFilter for new django-filters ## Code After: import django_filters from django.shortcuts import get_object_or_404 from django.utils.translation import ugettext_lazy as _ try: from django_filters import rest_framework as filters except ImportError: # Back-ward compatible for django-rest-framework<3.7 from rest_framework import filters from rest_framework import viewsets from teryt_tree.models import JednostkaAdministracyjna from teryt_tree.rest_framework_ext.serializers import \ JednostkaAdministracyjnaSerializer def custom_area_filter(queryset, _, value): if not value: return queryset return queryset.area(get_object_or_404(JednostkaAdministracyjna, pk=value)) class JednostkaAdministracyjnaFilter(filters.FilterSet): area = django_filters.CharFilter( method=custom_area_filter, label=_("Area") ) class Meta: model = 
JednostkaAdministracyjna fields = ['name', 'category', 'category__level', 'area'] class JednostkaAdministracyjnaViewSet(viewsets.ModelViewSet): queryset = (JednostkaAdministracyjna.objects. select_related('category'). prefetch_related('children'). all()) serializer_class = JednostkaAdministracyjnaSerializer filter_backends = (filters.DjangoFilterBackend,) filter_class = JednostkaAdministracyjnaFilter
# ... existing code ... from django.shortcuts import get_object_or_404 from django.utils.translation import ugettext_lazy as _ try: # ... modified code ... from teryt_tree.models import JednostkaAdministracyjna from teryt_tree.rest_framework_ext.serializers import \ JednostkaAdministracyjnaSerializer ... class JednostkaAdministracyjnaFilter(filters.FilterSet): area = django_filters.CharFilter( method=custom_area_filter, label=_("Area") ) # ... rest of the code ...
11b16c26c182636016e7d86cd0f94963eec42556
project/settings/ci.py
project/settings/ci.py
from .base import * # JWT Settings def jwt_get_username_from_payload_handler(payload): return payload.get('email') JWT_AUTH = { # 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET, 'JWT_AUDIENCE': AUTH0_CLIENT_ID, 'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler, 'JWT_AUTH_HEADER_PREFIX': 'Bearer', 'JWT_PUBLIC_KEY': jwt_public_key, 'JWT_ALGORITHM': 'RS256', } DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL) # Static Server Config STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' STATIC_URL = '/static/' STATICFILES_STORAGE = STATIC_STORAGE # Media (aka File Upload) Server Config MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media') MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = MEDIA_STORAGE # Email EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
from .base import * # JWT Settings def jwt_get_username_from_payload_handler(payload): return payload.get('email') JWT_AUTH = { # 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET, 'JWT_AUDIENCE': AUTH0_CLIENT_ID, 'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler, 'JWT_AUTH_HEADER_PREFIX': 'Bearer', 'JWT_PUBLIC_KEY': jwt_public_key, 'JWT_ALGORITHM': 'RS256', } # Static Server Config STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' STATIC_URL = '/static/' STATICFILES_STORAGE = STATIC_STORAGE # Media (aka File Upload) Server Config MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media') MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = MEDIA_STORAGE # Email EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Revert "Attempt to bypass test database"
Revert "Attempt to bypass test database" This reverts commit 889713c8c4c7151ba06448a3993778a91d2abfd6.
Python
bsd-2-clause
barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore
from .base import * # JWT Settings def jwt_get_username_from_payload_handler(payload): return payload.get('email') JWT_AUTH = { # 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET, 'JWT_AUDIENCE': AUTH0_CLIENT_ID, 'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler, 'JWT_AUTH_HEADER_PREFIX': 'Bearer', 'JWT_PUBLIC_KEY': jwt_public_key, 'JWT_ALGORITHM': 'RS256', } - DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL) # Static Server Config STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' STATIC_URL = '/static/' STATICFILES_STORAGE = STATIC_STORAGE # Media (aka File Upload) Server Config MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media') MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = MEDIA_STORAGE # Email EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
Revert "Attempt to bypass test database"
## Code Before: from .base import * # JWT Settings def jwt_get_username_from_payload_handler(payload): return payload.get('email') JWT_AUTH = { # 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET, 'JWT_AUDIENCE': AUTH0_CLIENT_ID, 'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler, 'JWT_AUTH_HEADER_PREFIX': 'Bearer', 'JWT_PUBLIC_KEY': jwt_public_key, 'JWT_ALGORITHM': 'RS256', } DATABASES['default']['TEST'] = dj_database_url.config(default=DATABASE_URL) # Static Server Config STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' STATIC_URL = '/static/' STATICFILES_STORAGE = STATIC_STORAGE # Media (aka File Upload) Server Config MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media') MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = MEDIA_STORAGE # Email EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' ## Instruction: Revert "Attempt to bypass test database" ## Code After: from .base import * # JWT Settings def jwt_get_username_from_payload_handler(payload): return payload.get('email') JWT_AUTH = { # 'JWT_SECRET_KEY': AUTH0_CLIENT_SECRET, 'JWT_AUDIENCE': AUTH0_CLIENT_ID, 'JWT_PAYLOAD_GET_USERNAME_HANDLER': jwt_get_username_from_payload_handler, 'JWT_AUTH_HEADER_PREFIX': 'Bearer', 'JWT_PUBLIC_KEY': jwt_public_key, 'JWT_ALGORITHM': 'RS256', } # Static Server Config STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static') STATIC_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' STATIC_URL = '/static/' STATICFILES_STORAGE = STATIC_STORAGE # Media (aka File Upload) Server Config MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media') MEDIA_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_URL = '/media/' DEFAULT_FILE_STORAGE = MEDIA_STORAGE # Email EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
// ... existing code ... // ... rest of the code ...
3ed6c303e935a78781d096ab90e77a6c1360322d
wutu/tests/modules/test_module/test_module.py
wutu/tests/modules/test_module/test_module.py
from wutu.module import Module class TestModule(Module): def __init__(self): super(TestModule, self).__init__() def ping(self): return "pong" def get(self): return {"result": "Hello"}
from wutu.module import Module class TestModule(Module): def __init__(self): super(TestModule, self).__init__() def ping(self): return "pong" def get(self, id): return {"result": id}
Make test module return dynamic value
Make test module return dynamic value
Python
mit
zaibacu/wutu,zaibacu/wutu,zaibacu/wutu
from wutu.module import Module class TestModule(Module): def __init__(self): super(TestModule, self).__init__() def ping(self): return "pong" - def get(self): + def get(self, id): - return {"result": "Hello"} + return {"result": id}
Make test module return dynamic value
## Code Before: from wutu.module import Module class TestModule(Module): def __init__(self): super(TestModule, self).__init__() def ping(self): return "pong" def get(self): return {"result": "Hello"} ## Instruction: Make test module return dynamic value ## Code After: from wutu.module import Module class TestModule(Module): def __init__(self): super(TestModule, self).__init__() def ping(self): return "pong" def get(self, id): return {"result": id}
# ... existing code ... def get(self, id): return {"result": id} # ... rest of the code ...
323cc3f50fa0bbd072bfe243443adf12e1b25220
bluebottle/projects/migrations/0019_auto_20170118_1537.py
bluebottle/projects/migrations/0019_auto_20170118_1537.py
from __future__ import unicode_literals import binascii import os from django.db import migrations def generate_key(): return binascii.hexlify(os.urandom(20)).decode() def create_auth_token(apps, schema_editor): Member = apps.get_model('members', 'member') Token = apps.get_model('authtoken', 'token') member = Member.objects.create( email='[email protected]', username='accounting' ) token = Token.objects.create( user=member, key=generate_key() ) class Migration(migrations.Migration): dependencies = [ ('projects', '0018_merge_20170118_1533'), ('authtoken', '0001_initial'), ] operations = [ migrations.RunPython(create_auth_token) ]
from __future__ import unicode_literals import binascii import os from django.db import migrations def generate_key(): return binascii.hexlify(os.urandom(20)).decode() def create_auth_token(apps, schema_editor): Member = apps.get_model('members', 'member') Token = apps.get_model('authtoken', 'token') member = Member.objects.create( email='[email protected]', username='accounting' ) token = Token.objects.create( user=member, key=generate_key() ) class Migration(migrations.Migration): dependencies = [ ('projects', '0018_merge_20170118_1533'), ('authtoken', '0001_initial'), ('quotes', '0005_auto_20180717_1017'), ('slides', '0006_auto_20180717_1017'), ] operations = [ migrations.RunPython(create_auth_token) ]
Add dependency on different migrations
Add dependency on different migrations
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
from __future__ import unicode_literals import binascii import os from django.db import migrations def generate_key(): return binascii.hexlify(os.urandom(20)).decode() def create_auth_token(apps, schema_editor): Member = apps.get_model('members', 'member') Token = apps.get_model('authtoken', 'token') member = Member.objects.create( email='[email protected]', username='accounting' ) token = Token.objects.create( user=member, key=generate_key() ) class Migration(migrations.Migration): dependencies = [ ('projects', '0018_merge_20170118_1533'), ('authtoken', '0001_initial'), + ('quotes', '0005_auto_20180717_1017'), + ('slides', '0006_auto_20180717_1017'), ] operations = [ migrations.RunPython(create_auth_token) ]
Add dependency on different migrations
## Code Before: from __future__ import unicode_literals import binascii import os from django.db import migrations def generate_key(): return binascii.hexlify(os.urandom(20)).decode() def create_auth_token(apps, schema_editor): Member = apps.get_model('members', 'member') Token = apps.get_model('authtoken', 'token') member = Member.objects.create( email='[email protected]', username='accounting' ) token = Token.objects.create( user=member, key=generate_key() ) class Migration(migrations.Migration): dependencies = [ ('projects', '0018_merge_20170118_1533'), ('authtoken', '0001_initial'), ] operations = [ migrations.RunPython(create_auth_token) ] ## Instruction: Add dependency on different migrations ## Code After: from __future__ import unicode_literals import binascii import os from django.db import migrations def generate_key(): return binascii.hexlify(os.urandom(20)).decode() def create_auth_token(apps, schema_editor): Member = apps.get_model('members', 'member') Token = apps.get_model('authtoken', 'token') member = Member.objects.create( email='[email protected]', username='accounting' ) token = Token.objects.create( user=member, key=generate_key() ) class Migration(migrations.Migration): dependencies = [ ('projects', '0018_merge_20170118_1533'), ('authtoken', '0001_initial'), ('quotes', '0005_auto_20180717_1017'), ('slides', '0006_auto_20180717_1017'), ] operations = [ migrations.RunPython(create_auth_token) ]
... ('authtoken', '0001_initial'), ('quotes', '0005_auto_20180717_1017'), ('slides', '0006_auto_20180717_1017'), ] ...
0f49230309ac115ff78eddd36bcd153d7f3b75ea
data_aggregator/threads.py
data_aggregator/threads.py
import queue import threading from multiprocessing import Queue class ThreadPool(): def __init__(self, processes=20): self.processes = processes self.threads = [Thread() for _ in range(0, processes)] self.mp_queue = Queue() def yield_dead_threads(self): for thread in self.threads: if not thread.is_alive(): yield thread def map(self, func, values): completed_count = 0 values_iter = iter(values) while completed_count < len(values): try: self.mp_queue.get_nowait() completed_count += 1 except queue.Empty: pass for thread in self.yield_dead_threads(): try: # run next job job = next(values_iter) thread.run(func, job, self.mp_queue) except StopIteration: break def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): pass class Thread(): def __init__(self): self.thread = None def run(self, target, *args, **kwargs): self.thread = threading.Thread(target=target, args=args, kwargs=kwargs) self.thread.start() def is_alive(self): if self.thread: return self.thread.is_alive() else: return False
import queue import threading from multiprocessing import Queue class ThreadPool(): def __init__(self, processes=20): self.processes = processes self.threads = [Thread() for _ in range(0, processes)] self.mp_queue = Queue() def yield_dead_threads(self): for thread in self.threads: if not thread.is_alive(): yield thread def map(self, func, values): completed_count = 0 values_iter = iter(values) while completed_count < len(values): try: self.mp_queue.get_nowait() completed_count += 1 except queue.Empty: pass for thread in self.yield_dead_threads(): try: # run thread with the next value value = next(values_iter) thread.run(func, value, self.mp_queue) except StopIteration: break def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): pass class Thread(): def __init__(self): self.thread = None def run(self, target, *args, **kwargs): self.thread = threading.Thread(target=target, args=args, kwargs=kwargs) self.thread.start() def is_alive(self): if self.thread: return self.thread.is_alive() else: return False
Remove reference to "job" from ThreadPool
Remove reference to "job" from ThreadPool
Python
apache-2.0
uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics,uw-it-aca/canvas-analytics
import queue import threading from multiprocessing import Queue class ThreadPool(): def __init__(self, processes=20): self.processes = processes self.threads = [Thread() for _ in range(0, processes)] self.mp_queue = Queue() def yield_dead_threads(self): for thread in self.threads: if not thread.is_alive(): yield thread def map(self, func, values): completed_count = 0 values_iter = iter(values) while completed_count < len(values): try: self.mp_queue.get_nowait() completed_count += 1 except queue.Empty: pass for thread in self.yield_dead_threads(): try: - # run next job + # run thread with the next value - job = next(values_iter) + value = next(values_iter) - thread.run(func, job, self.mp_queue) + thread.run(func, value, self.mp_queue) except StopIteration: break def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): pass class Thread(): def __init__(self): self.thread = None def run(self, target, *args, **kwargs): self.thread = threading.Thread(target=target, args=args, kwargs=kwargs) self.thread.start() def is_alive(self): if self.thread: return self.thread.is_alive() else: return False
Remove reference to "job" from ThreadPool
## Code Before: import queue import threading from multiprocessing import Queue class ThreadPool(): def __init__(self, processes=20): self.processes = processes self.threads = [Thread() for _ in range(0, processes)] self.mp_queue = Queue() def yield_dead_threads(self): for thread in self.threads: if not thread.is_alive(): yield thread def map(self, func, values): completed_count = 0 values_iter = iter(values) while completed_count < len(values): try: self.mp_queue.get_nowait() completed_count += 1 except queue.Empty: pass for thread in self.yield_dead_threads(): try: # run next job job = next(values_iter) thread.run(func, job, self.mp_queue) except StopIteration: break def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): pass class Thread(): def __init__(self): self.thread = None def run(self, target, *args, **kwargs): self.thread = threading.Thread(target=target, args=args, kwargs=kwargs) self.thread.start() def is_alive(self): if self.thread: return self.thread.is_alive() else: return False ## Instruction: Remove reference to "job" from ThreadPool ## Code After: import queue import threading from multiprocessing import Queue class ThreadPool(): def __init__(self, processes=20): self.processes = processes self.threads = [Thread() for _ in range(0, processes)] self.mp_queue = Queue() def yield_dead_threads(self): for thread in self.threads: if not thread.is_alive(): yield thread def map(self, func, values): completed_count = 0 values_iter = iter(values) while completed_count < len(values): try: self.mp_queue.get_nowait() completed_count += 1 except queue.Empty: pass for thread in self.yield_dead_threads(): try: # run thread with the next value value = next(values_iter) thread.run(func, value, self.mp_queue) except StopIteration: break def __enter__(self): return self def __exit__(self, exc_type, exc_value, exc_tb): pass class Thread(): def __init__(self): self.thread = None def run(self, target, *args, **kwargs): self.thread = 
threading.Thread(target=target, args=args, kwargs=kwargs) self.thread.start() def is_alive(self): if self.thread: return self.thread.is_alive() else: return False
... try: # run thread with the next value value = next(values_iter) thread.run(func, value, self.mp_queue) except StopIteration: ...
2e28cf549bd7de29143c317871008b3115e44975
tests/vstb-example-html5/tests/rotate.py
tests/vstb-example-html5/tests/rotate.py
from stbt import press, wait_for_match def wait_for_vstb_startup(): wait_for_match('stb-tester-350px.png') def test_that_image_is_rotated_by_arrows(): press("KEY_LEFT") wait_for_match('stb-tester-left.png') press("KEY_RIGHT") wait_for_match('stb-tester-right.png') press("KEY_UP") wait_for_match('stb-tester-up.png') press("KEY_DOWN") wait_for_match('stb-tester-down.png') def test_that_image_returns_to_normal_on_OK(): press("KEY_OK") wait_for_match('stb-tester-350px.png') def test_that_custom_key_is_recognised(): press("KEY_CUSTOM") wait_for_match('stb-tester-up.png', timeout_secs=1)
from stbt import press, wait_for_match def wait_for_vstb_startup(): wait_for_match('stb-tester-350px.png', timeout_secs=20) def test_that_image_is_rotated_by_arrows(): press("KEY_LEFT") wait_for_match('stb-tester-left.png') press("KEY_RIGHT") wait_for_match('stb-tester-right.png') press("KEY_UP") wait_for_match('stb-tester-up.png') press("KEY_DOWN") wait_for_match('stb-tester-down.png') def test_that_image_returns_to_normal_on_OK(): press("KEY_OK") wait_for_match('stb-tester-350px.png') def test_that_custom_key_is_recognised(): press("KEY_CUSTOM") wait_for_match('stb-tester-up.png', timeout_secs=1)
Fix virtual-stb intermittant test-failure on Travis
Fix virtual-stb intermittant test-failure on Travis test_that_virtual_stb_configures_stb_tester_for_testing_virtual_stbs fails intermittently on Travis because sometimes chrome takes longer than 10s to start-up. This causes the test to fail with: > MatchTimeout: Didn't find match for '.../stb-tester-350px.png' within 10 > seconds This commit should fix that issue.
Python
lgpl-2.1
LewisHaley/stb-tester,LewisHaley/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,stb-tester/stb-tester,LewisHaley/stb-tester,LewisHaley/stb-tester,stb-tester/stb-tester,martynjarvis/stb-tester,stb-tester/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester,martynjarvis/stb-tester
from stbt import press, wait_for_match def wait_for_vstb_startup(): - wait_for_match('stb-tester-350px.png') + wait_for_match('stb-tester-350px.png', timeout_secs=20) def test_that_image_is_rotated_by_arrows(): press("KEY_LEFT") wait_for_match('stb-tester-left.png') press("KEY_RIGHT") wait_for_match('stb-tester-right.png') press("KEY_UP") wait_for_match('stb-tester-up.png') press("KEY_DOWN") wait_for_match('stb-tester-down.png') def test_that_image_returns_to_normal_on_OK(): press("KEY_OK") wait_for_match('stb-tester-350px.png') def test_that_custom_key_is_recognised(): press("KEY_CUSTOM") wait_for_match('stb-tester-up.png', timeout_secs=1)
Fix virtual-stb intermittant test-failure on Travis
## Code Before: from stbt import press, wait_for_match def wait_for_vstb_startup(): wait_for_match('stb-tester-350px.png') def test_that_image_is_rotated_by_arrows(): press("KEY_LEFT") wait_for_match('stb-tester-left.png') press("KEY_RIGHT") wait_for_match('stb-tester-right.png') press("KEY_UP") wait_for_match('stb-tester-up.png') press("KEY_DOWN") wait_for_match('stb-tester-down.png') def test_that_image_returns_to_normal_on_OK(): press("KEY_OK") wait_for_match('stb-tester-350px.png') def test_that_custom_key_is_recognised(): press("KEY_CUSTOM") wait_for_match('stb-tester-up.png', timeout_secs=1) ## Instruction: Fix virtual-stb intermittant test-failure on Travis ## Code After: from stbt import press, wait_for_match def wait_for_vstb_startup(): wait_for_match('stb-tester-350px.png', timeout_secs=20) def test_that_image_is_rotated_by_arrows(): press("KEY_LEFT") wait_for_match('stb-tester-left.png') press("KEY_RIGHT") wait_for_match('stb-tester-right.png') press("KEY_UP") wait_for_match('stb-tester-up.png') press("KEY_DOWN") wait_for_match('stb-tester-down.png') def test_that_image_returns_to_normal_on_OK(): press("KEY_OK") wait_for_match('stb-tester-350px.png') def test_that_custom_key_is_recognised(): press("KEY_CUSTOM") wait_for_match('stb-tester-up.png', timeout_secs=1)
# ... existing code ... def wait_for_vstb_startup(): wait_for_match('stb-tester-350px.png', timeout_secs=20) # ... rest of the code ...
7c65017fa16632f21eb94896a3d7c8d2cce989dd
user/admin.py
user/admin.py
from django.contrib import admin from .models import User @admin.register(User) class UserAdmin(admin.ModelAdmin): # list view list_display = ( 'email', 'get_date_joined', 'is_staff', 'is_superuser') list_filter = ( 'is_staff', 'is_superuser', 'profile__joined') ordering = ('email',) search_fields = ('email',) def get_date_joined(self, user): return user.profile.joined get_date_joined.short_description = 'Joined' get_date_joined.admin_order_field = ( 'profile__joined')
from django.contrib import admin from .models import User @admin.register(User) class UserAdmin(admin.ModelAdmin): # list view list_display = ( 'get_name', 'email', 'get_date_joined', 'is_staff', 'is_superuser') list_filter = ( 'is_staff', 'is_superuser', 'profile__joined') ordering = ('email',) search_fields = ('email',) def get_date_joined(self, user): return user.profile.joined get_date_joined.short_description = 'Joined' get_date_joined.admin_order_field = ( 'profile__joined') def get_name(self, user): return user.profile.name get_name.short_description = 'Name' get_name.admin_order_field = 'profile__name'
Add Profile name to UserAdmin list.
Ch23: Add Profile name to UserAdmin list.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
from django.contrib import admin from .models import User @admin.register(User) class UserAdmin(admin.ModelAdmin): # list view list_display = ( + 'get_name', 'email', 'get_date_joined', 'is_staff', 'is_superuser') list_filter = ( 'is_staff', 'is_superuser', 'profile__joined') ordering = ('email',) search_fields = ('email',) def get_date_joined(self, user): return user.profile.joined get_date_joined.short_description = 'Joined' get_date_joined.admin_order_field = ( 'profile__joined') + def get_name(self, user): + return user.profile.name + get_name.short_description = 'Name' + get_name.admin_order_field = 'profile__name' +
Add Profile name to UserAdmin list.
## Code Before: from django.contrib import admin from .models import User @admin.register(User) class UserAdmin(admin.ModelAdmin): # list view list_display = ( 'email', 'get_date_joined', 'is_staff', 'is_superuser') list_filter = ( 'is_staff', 'is_superuser', 'profile__joined') ordering = ('email',) search_fields = ('email',) def get_date_joined(self, user): return user.profile.joined get_date_joined.short_description = 'Joined' get_date_joined.admin_order_field = ( 'profile__joined') ## Instruction: Add Profile name to UserAdmin list. ## Code After: from django.contrib import admin from .models import User @admin.register(User) class UserAdmin(admin.ModelAdmin): # list view list_display = ( 'get_name', 'email', 'get_date_joined', 'is_staff', 'is_superuser') list_filter = ( 'is_staff', 'is_superuser', 'profile__joined') ordering = ('email',) search_fields = ('email',) def get_date_joined(self, user): return user.profile.joined get_date_joined.short_description = 'Joined' get_date_joined.admin_order_field = ( 'profile__joined') def get_name(self, user): return user.profile.name get_name.short_description = 'Name' get_name.admin_order_field = 'profile__name'
# ... existing code ... list_display = ( 'get_name', 'email', # ... modified code ... 'profile__joined') def get_name(self, user): return user.profile.name get_name.short_description = 'Name' get_name.admin_order_field = 'profile__name' # ... rest of the code ...
cab3289827c859085dff9d492362d6648b52d23f
karma.py
karma.py
from brutal.core.plugin import cmd, match import collections karmas = collections.Counter() @match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$') def karma_inc(event, name, pluses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] += len(pluses)//2 @match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$') def karma_dec(event, name, minuses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] -= len(minuses)//2 @cmd def karma(event): """Returns karma points for a given user.""" args = event.args if len(args) < 1: return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) user = event.args[0] if user not in karmas: karmas[user] = 0 return "{0}'s karma level is: {1}".format(user, karmas[user])
from brutal.core.plugin import cmd, match import collections karmas = collections.Counter() @match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$') def karma_inc(event, name, pluses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] += len(pluses)//2 @match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$') def karma_dec(event, name, minuses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] -= len(minuses)//2 @cmd def karma(event): """Returns karma points for a given user.""" args = event.args if len(args) < 1: return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) user = event.args[0] if user not in karmas: karmas[user] = 0 return "{0}'s karma level is: {1}".format(user, karmas[user])
Split long line to make it more readable.
Karma: Split long line to make it more readable. Signed-off-by: Jakub Novak <[email protected]>
Python
apache-2.0
mrshu/brutal-plugins,Adman/brutal-plugins
from brutal.core.plugin import cmd, match import collections karmas = collections.Counter() @match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$') def karma_inc(event, name, pluses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] += len(pluses)//2 @match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$') def karma_dec(event, name, minuses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] -= len(minuses)//2 @cmd def karma(event): """Returns karma points for a given user.""" args = event.args if len(args) < 1: - return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) + return "{0}'s karma level is: {1}".format(event.meta['nick'], + karmas[event.meta['nick']]) user = event.args[0] if user not in karmas: karmas[user] = 0 return "{0}'s karma level is: {1}".format(user, karmas[user])
Split long line to make it more readable.
## Code Before: from brutal.core.plugin import cmd, match import collections karmas = collections.Counter() @match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$') def karma_inc(event, name, pluses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] += len(pluses)//2 @match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$') def karma_dec(event, name, minuses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] -= len(minuses)//2 @cmd def karma(event): """Returns karma points for a given user.""" args = event.args if len(args) < 1: return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) user = event.args[0] if user not in karmas: karmas[user] = 0 return "{0}'s karma level is: {1}".format(user, karmas[user]) ## Instruction: Split long line to make it more readable. ## Code After: from brutal.core.plugin import cmd, match import collections karmas = collections.Counter() @match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$') def karma_inc(event, name, pluses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] += len(pluses)//2 @match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$') def karma_dec(event, name, minuses, *args): if name == event.meta['nick']: return 'Not in this universe, maggot!' else: karmas[name] -= len(minuses)//2 @cmd def karma(event): """Returns karma points for a given user.""" args = event.args if len(args) < 1: return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) user = event.args[0] if user not in karmas: karmas[user] = 0 return "{0}'s karma level is: {1}".format(user, karmas[user])
// ... existing code ... if len(args) < 1: return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']]) // ... rest of the code ...
94b6b97dc1e706a6560092aa29cbe4e21f052924
froide/account/apps.py
froide/account/apps.py
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class AccountConfig(AppConfig): name = 'froide.account' verbose_name = _("Account") def ready(self): from froide.bounce.signals import user_email_bounced user_email_bounced.connect(deactivate_user_after_bounce) def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs): if not should_deactivate: return if not bounce.user: return bounce.user.deactivate()
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.urls import reverse from .menu import menu_registry, MenuItem class AccountConfig(AppConfig): name = 'froide.account' verbose_name = _("Account") def ready(self): from froide.bounce.signals import user_email_bounced user_email_bounced.connect(deactivate_user_after_bounce) menu_registry.register(get_settings_menu_item) menu_registry.register(get_request_menu_item) def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs): if not should_deactivate: return if not bounce.user: return bounce.user.deactivate() def get_request_menu_item(request): return MenuItem( section='before_request', order=999, url=reverse('account-show'), label=_('My requests') ) def get_settings_menu_item(request): return MenuItem( section='after_settings', order=-1, url=reverse('account-settings'), label=_('Settings') )
Make settings and requests menu items
Make settings and requests menu items
Python
mit
fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ + from django.urls import reverse + + from .menu import menu_registry, MenuItem class AccountConfig(AppConfig): name = 'froide.account' verbose_name = _("Account") def ready(self): from froide.bounce.signals import user_email_bounced user_email_bounced.connect(deactivate_user_after_bounce) + menu_registry.register(get_settings_menu_item) + menu_registry.register(get_request_menu_item) + def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs): if not should_deactivate: return if not bounce.user: return bounce.user.deactivate() + + def get_request_menu_item(request): + return MenuItem( + section='before_request', order=999, + url=reverse('account-show'), + label=_('My requests') + ) + + + def get_settings_menu_item(request): + return MenuItem( + section='after_settings', order=-1, + url=reverse('account-settings'), + label=_('Settings') + ) +
Make settings and requests menu items
## Code Before: from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ class AccountConfig(AppConfig): name = 'froide.account' verbose_name = _("Account") def ready(self): from froide.bounce.signals import user_email_bounced user_email_bounced.connect(deactivate_user_after_bounce) def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs): if not should_deactivate: return if not bounce.user: return bounce.user.deactivate() ## Instruction: Make settings and requests menu items ## Code After: from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.urls import reverse from .menu import menu_registry, MenuItem class AccountConfig(AppConfig): name = 'froide.account' verbose_name = _("Account") def ready(self): from froide.bounce.signals import user_email_bounced user_email_bounced.connect(deactivate_user_after_bounce) menu_registry.register(get_settings_menu_item) menu_registry.register(get_request_menu_item) def deactivate_user_after_bounce(sender, bounce, should_deactivate=False, **kwargs): if not should_deactivate: return if not bounce.user: return bounce.user.deactivate() def get_request_menu_item(request): return MenuItem( section='before_request', order=999, url=reverse('account-show'), label=_('My requests') ) def get_settings_menu_item(request): return MenuItem( section='after_settings', order=-1, url=reverse('account-settings'), label=_('Settings') )
# ... existing code ... from django.utils.translation import ugettext_lazy as _ from django.urls import reverse from .menu import menu_registry, MenuItem # ... modified code ... menu_registry.register(get_settings_menu_item) menu_registry.register(get_request_menu_item) ... bounce.user.deactivate() def get_request_menu_item(request): return MenuItem( section='before_request', order=999, url=reverse('account-show'), label=_('My requests') ) def get_settings_menu_item(request): return MenuItem( section='after_settings', order=-1, url=reverse('account-settings'), label=_('Settings') ) # ... rest of the code ...
3c0d52aa0a936b3ae138ddfba66e7ba9dcc5f934
sympy/plotting/proxy_pyglet.py
sympy/plotting/proxy_pyglet.py
from warnings import warn from sympy.core.compatibility import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
Change the import location of DeprecationWarning used by plotting module
Change the import location of DeprecationWarning used by plotting module The SympyDeprecationWarning was moved from its original location. The change was done in the master branch. The same change must be mirrored in this development branch.
Python
bsd-3-clause
pbrady/sympy,grevutiu-gabriel/sympy,rahuldan/sympy,atsao72/sympy,kmacinnis/sympy,yashsharan/sympy,drufat/sympy,iamutkarshtiwari/sympy,shikil/sympy,atsao72/sympy,jaimahajan1997/sympy,meghana1995/sympy,jerli/sympy,oliverlee/sympy,ahhda/sympy,garvitr/sympy,sahmed95/sympy,abloomston/sympy,kaushik94/sympy,jbbskinny/sympy,cswiercz/sympy,postvakje/sympy,maniteja123/sympy,asm666/sympy,dqnykamp/sympy,jamesblunt/sympy,Curious72/sympy,diofant/diofant,Titan-C/sympy,kaushik94/sympy,saurabhjn76/sympy,amitjamadagni/sympy,atreyv/sympy,kevalds51/sympy,abloomston/sympy,saurabhjn76/sympy,sampadsaha5/sympy,abhiii5459/sympy,chaffra/sympy,MechCoder/sympy,MridulS/sympy,hargup/sympy,aktech/sympy,abhiii5459/sympy,vipulroxx/sympy,amitjamadagni/sympy,pandeyadarsh/sympy,Vishluck/sympy,sunny94/temp,emon10005/sympy,kaichogami/sympy,MridulS/sympy,AunShiLord/sympy,pbrady/sympy,Curious72/sympy,kevalds51/sympy,wyom/sympy,Davidjohnwilson/sympy,wanglongqi/sympy,jamesblunt/sympy,liangjiaxing/sympy,yashsharan/sympy,MechCoder/sympy,moble/sympy,atreyv/sympy,meghana1995/sympy,Arafatk/sympy,lindsayad/sympy,pbrady/sympy,aktech/sympy,mafiya69/sympy,jbbskinny/sympy,asm666/sympy,ga7g08/sympy,ga7g08/sympy,cswiercz/sympy,kevalds51/sympy,sahmed95/sympy,Sumith1896/sympy,souravsingh/sympy,skirpichev/omg,drufat/sympy,sunny94/temp,jerli/sympy,toolforger/sympy,mafiya69/sympy,cccfran/sympy,MridulS/sympy,chaffra/sympy,yukoba/sympy,AunShiLord/sympy,iamutkarshtiwari/sympy,ChristinaZografou/sympy,toolforger/sympy,lindsayad/sympy,farhaanbukhsh/sympy,debugger22/sympy,kumarkrishna/sympy,pandeyadarsh/sympy,ga7g08/sympy,hargup/sympy,saurabhjn76/sympy,atreyv/sympy,Curious72/sympy,lidavidm/sympy,ahhda/sympy,sahilshekhawat/sympy,dqnykamp/sympy,liangjiaxing/sympy,chaffra/sympy,kaushik94/sympy,MechCoder/sympy,Designist/sympy,jbbskinny/sympy,garvitr/sympy,madan96/sympy,beni55/sympy,VaibhavAgarwalVA/sympy,wanglongqi/sympy,skidzo/sympy,debugger22/sympy,cccfran/sympy,hargup/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,ahhda/sympy,p
ostvakje/sympy,vipulroxx/sympy,dqnykamp/sympy,Sumith1896/sympy,bukzor/sympy,mafiya69/sympy,sunny94/temp,shikil/sympy,mcdaniel67/sympy,grevutiu-gabriel/sympy,Shaswat27/sympy,shikil/sympy,sampadsaha5/sympy,Davidjohnwilson/sympy,abloomston/sympy,skidzo/sympy,Shaswat27/sympy,Gadal/sympy,emon10005/sympy,lidavidm/sympy,hrashk/sympy,beni55/sympy,kumarkrishna/sympy,wyom/sympy,souravsingh/sympy,oliverlee/sympy,wanglongqi/sympy,sahilshekhawat/sympy,kmacinnis/sympy,bukzor/sympy,ChristinaZografou/sympy,aktech/sympy,maniteja123/sympy,moble/sympy,hrashk/sympy,cccfran/sympy,lindsayad/sympy,yukoba/sympy,emon10005/sympy,Titan-C/sympy,lidavidm/sympy,shipci/sympy,vipulroxx/sympy,bukzor/sympy,moble/sympy,yashsharan/sympy,Mitchkoens/sympy,madan96/sympy,Designist/sympy,toolforger/sympy,Gadal/sympy,Vishluck/sympy,Mitchkoens/sympy,meghana1995/sympy,ChristinaZografou/sympy,madan96/sympy,sahmed95/sympy,sampadsaha5/sympy,AunShiLord/sympy,srjoglekar246/sympy,cswiercz/sympy,rahuldan/sympy,skidzo/sympy,jaimahajan1997/sympy,kaichogami/sympy,flacjacket/sympy,jaimahajan1997/sympy,Shaswat27/sympy,abhiii5459/sympy,Mitchkoens/sympy,iamutkarshtiwari/sympy,kumarkrishna/sympy,AkademieOlympia/sympy,asm666/sympy,pandeyadarsh/sympy,postvakje/sympy,jerli/sympy,AkademieOlympia/sympy,shipci/sympy,Titan-C/sympy,shipci/sympy,farhaanbukhsh/sympy,atsao72/sympy,debugger22/sympy,kmacinnis/sympy,grevutiu-gabriel/sympy,yukoba/sympy,mcdaniel67/sympy,Arafatk/sympy,maniteja123/sympy,garvitr/sympy,sahilshekhawat/sympy,liangjiaxing/sympy,drufat/sympy,Davidjohnwilson/sympy,beni55/sympy,Designist/sympy,kaichogami/sympy,souravsingh/sympy,Vishluck/sympy,hrashk/sympy,farhaanbukhsh/sympy,oliverlee/sympy,wyom/sympy,rahuldan/sympy,jamesblunt/sympy,Sumith1896/sympy,Arafatk/sympy,VaibhavAgarwalVA/sympy,mcdaniel67/sympy,Gadal/sympy
from warnings import warn - from sympy.core.compatibility import SymPyDeprecationWarning + from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
Change the import location of DeprecationWarning used by plotting module
## Code Before: from warnings import warn from sympy.core.compatibility import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs) ## Instruction: Change the import location of DeprecationWarning used by plotting module ## Code After: from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). 
Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
# ... existing code ... from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot # ... rest of the code ...
00c3f1e3eb38a22d95c6e59f72e51a9b53723a31
brains/namelist/tasks.py
brains/namelist/tasks.py
from celery.task import task from namelist.scrape import get_user_profile_id, scrape_profile, NotFound from namelist.models import Player, Category @task() def import_user(user, profile_name_or_id, category=None): if isinstance(profile_name_or_id, basestring): try: profile_id = get_user_profile_id(profile_name_or_id) except NotFound: user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return else: profile_id = profile_name_or_id info = scrape_profile(profile_id) player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id) if player[1]: player[0].category = category player[0].save()
from celery.task import task from namelist.scrape import get_user_profile_id, scrape_profile, NotFound from namelist.models import Player, Category @task() def import_user(profile_name_or_id, category=None, user=None): if isinstance(profile_name_or_id, basestring): try: profile_id = get_user_profile_id(profile_name_or_id) except NotFound: if user: user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return else: profile_id = profile_name_or_id info = scrape_profile(profile_id) player, created = Player.objects.get_or_create(profile_id=profile_id) if player[1]: player[0].category = category player[0].name = info[0] player[0].group_name = info[1] player[0].save()
Fix duplicate profile key errors with a less specific query.
Fix duplicate profile key errors with a less specific query.
Python
bsd-3-clause
crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains
from celery.task import task from namelist.scrape import get_user_profile_id, scrape_profile, NotFound from namelist.models import Player, Category @task() - def import_user(user, profile_name_or_id, category=None): + def import_user(profile_name_or_id, category=None, user=None): if isinstance(profile_name_or_id, basestring): try: profile_id = get_user_profile_id(profile_name_or_id) except NotFound: + if user: - user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) + user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return else: profile_id = profile_name_or_id info = scrape_profile(profile_id) - player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id) + player, created = Player.objects.get_or_create(profile_id=profile_id) if player[1]: player[0].category = category + player[0].name = info[0] + player[0].group_name = info[1] - player[0].save() + player[0].save()
Fix duplicate profile key errors with a less specific query.
## Code Before: from celery.task import task from namelist.scrape import get_user_profile_id, scrape_profile, NotFound from namelist.models import Player, Category @task() def import_user(user, profile_name_or_id, category=None): if isinstance(profile_name_or_id, basestring): try: profile_id = get_user_profile_id(profile_name_or_id) except NotFound: user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return else: profile_id = profile_name_or_id info = scrape_profile(profile_id) player = Player.objects.get_or_create(name=info[0], group_name=info[1], profile_id=profile_id) if player[1]: player[0].category = category player[0].save() ## Instruction: Fix duplicate profile key errors with a less specific query. ## Code After: from celery.task import task from namelist.scrape import get_user_profile_id, scrape_profile, NotFound from namelist.models import Player, Category @task() def import_user(profile_name_or_id, category=None, user=None): if isinstance(profile_name_or_id, basestring): try: profile_id = get_user_profile_id(profile_name_or_id) except NotFound: if user: user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return else: profile_id = profile_name_or_id info = scrape_profile(profile_id) player, created = Player.objects.get_or_create(profile_id=profile_id) if player[1]: player[0].category = category player[0].name = info[0] player[0].group_name = info[1] player[0].save()
// ... existing code ... @task() def import_user(profile_name_or_id, category=None, user=None): if isinstance(profile_name_or_id, basestring): // ... modified code ... except NotFound: if user: user.message_set.create(message="Couldn't create {0}".format(profile_name_or_id)) return ... player, created = Player.objects.get_or_create(profile_id=profile_id) if player[1]: ... player[0].category = category player[0].name = info[0] player[0].group_name = info[1] player[0].save() // ... rest of the code ...
1118541b1cdea7f6079bb63d000ba54f69dfa119
books/views.py
books/views.py
from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect from django.shortcuts import render from books import models from books import forms @login_required def receipt_list(request, user_id): user = User.objects.get(id=user_id) ctx = {} ctx['user'] = user ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id') return render(request, 'receipt_list.html', context=ctx) @login_required def receipt_create(request, user_id): if request.method == "POST": form = forms.ReceiptForm(request.POST) if form.is_valid(): data = form.cleaned_data models.Receipt.objects.create(title=data.get("title"), price=data.get("price"), user=request.user) return HttpResponseRedirect(reverse('receipt_list', args=[request.user.id])) else: form = forms.ReceiptForm() return render(request, 'receipt_create.html', {'form': form})
from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.shortcuts import redirect from django.shortcuts import render from books import models from books import forms @login_required def receipt_list(request, user_id): user = User.objects.get(id=user_id) ctx = {} ctx['user'] = user ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id') return render(request, 'receipt_list.html', context=ctx) @login_required def receipt_create(request, user_id): if request.method == "POST": form = forms.ReceiptForm(request.POST) if form.is_valid(): form.instance.user = request.user form.save() return redirect(reverse('receipt_list', args=[request.user.id])) else: form = forms.ReceiptForm() return render(request, 'receipt_create.html', {'form': form})
Use form.save for receipt creation
Use form.save for receipt creation
Python
mit
trimailov/finance,trimailov/finance,trimailov/finance
from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.urlresolvers import reverse - from django.http import HttpResponseRedirect + from django.shortcuts import redirect from django.shortcuts import render from books import models from books import forms @login_required def receipt_list(request, user_id): user = User.objects.get(id=user_id) ctx = {} ctx['user'] = user ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id') return render(request, 'receipt_list.html', context=ctx) @login_required def receipt_create(request, user_id): if request.method == "POST": form = forms.ReceiptForm(request.POST) if form.is_valid(): + form.instance.user = request.user + form.save() + return redirect(reverse('receipt_list', args=[request.user.id])) - data = form.cleaned_data - models.Receipt.objects.create(title=data.get("title"), - price=data.get("price"), - user=request.user) - return HttpResponseRedirect(reverse('receipt_list', - args=[request.user.id])) else: form = forms.ReceiptForm() return render(request, 'receipt_create.html', {'form': form})
Use form.save for receipt creation
## Code Before: from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect from django.shortcuts import render from books import models from books import forms @login_required def receipt_list(request, user_id): user = User.objects.get(id=user_id) ctx = {} ctx['user'] = user ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id') return render(request, 'receipt_list.html', context=ctx) @login_required def receipt_create(request, user_id): if request.method == "POST": form = forms.ReceiptForm(request.POST) if form.is_valid(): data = form.cleaned_data models.Receipt.objects.create(title=data.get("title"), price=data.get("price"), user=request.user) return HttpResponseRedirect(reverse('receipt_list', args=[request.user.id])) else: form = forms.ReceiptForm() return render(request, 'receipt_create.html', {'form': form}) ## Instruction: Use form.save for receipt creation ## Code After: from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.shortcuts import redirect from django.shortcuts import render from books import models from books import forms @login_required def receipt_list(request, user_id): user = User.objects.get(id=user_id) ctx = {} ctx['user'] = user ctx['receipts'] = models.Receipt.objects.filter(user=user).order_by('-id') return render(request, 'receipt_list.html', context=ctx) @login_required def receipt_create(request, user_id): if request.method == "POST": form = forms.ReceiptForm(request.POST) if form.is_valid(): form.instance.user = request.user form.save() return redirect(reverse('receipt_list', args=[request.user.id])) else: form = forms.ReceiptForm() return render(request, 'receipt_create.html', {'form': form})
... from django.core.urlresolvers import reverse from django.shortcuts import redirect from django.shortcuts import render ... if form.is_valid(): form.instance.user = request.user form.save() return redirect(reverse('receipt_list', args=[request.user.id])) else: ...
ee53ec51d98802bf0bc55e70c39cc0918f2bb274
icekit/plugins/blog_post/content_plugins.py
icekit/plugins/blog_post/content_plugins.py
from django.apps import apps from django.conf import settings from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
from django.apps import apps from django.conf import settings from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
Update Blog model and content item matching
Update Blog model and content item matching
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
from django.apps import apps from django.conf import settings - from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): - model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) + model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
Update Blog model and content item matching
## Code Before: from django.apps import apps from django.conf import settings from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ] ## Instruction: Update Blog model and content item matching ## Code After: from django.apps import apps from django.conf import settings from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
// ... existing code ... from django.conf import settings from django.utils.translation import ugettext_lazy as _ // ... modified code ... class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') // ... rest of the code ...
6cfb0ca69b43784d495920865f0a250f7d16ff84
trump/extensions/loader.py
trump/extensions/loader.py
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) extension_names = os.listdir(os.path.join(curdir,'source')) for name in extension_names: ext = find_module(name, ['source']) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) sourcedir = os.path.join(curdir,'source') extension_names = os.listdir(sourcedir) for name in extension_names: ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
Use full path to find mods
Use full path to find mods
Python
bsd-3-clause
jnmclarty/trump,Equitable/trump
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) + sourcedir = os.path.join(curdir,'source') - extension_names = os.listdir(os.path.join(curdir,'source')) + extension_names = os.listdir(sourcedir) for name in extension_names: - ext = find_module(name, ['source']) + ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
Use full path to find mods
## Code Before: from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) extension_names = os.listdir(os.path.join(curdir,'source')) for name in extension_names: ext = find_module(name, ['source']) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod) ## Instruction: Use full path to find mods ## Code After: from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) sourcedir = os.path.join(curdir,'source') extension_names = os.listdir(sourcedir) for name in extension_names: ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
# ... existing code ... curdir = os.path.dirname(os.path.realpath(__file__)) sourcedir = os.path.join(curdir,'source') extension_names = os.listdir(sourcedir) # ... modified code ... for name in extension_names: ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) # ... rest of the code ...
c1f270700d9de209577b64c40b71b5f5b69c5aae
cards.py
cards.py
from pymongo import MongoClient class Cards: def __init__(self, dbname='cards'): """Instantiate this class. Set up a connection to the given Mongo database. Get to the collection we'll store cards in. Args: dbname (str): Database name. """ self.client = MongoClient() self.db = self.client[dbname] self.cards_coll = self.db['cards'] @property def sets(self): """Return a list of all the card sets in the database. Args: None Returns: list: List of all card sets in the database. """ return self.cards_coll.distinct('set') def retrieve_set(self, setname): """Return a list of all the cards in the given set. Args: setname (str): Name of set. Returns: list: List of all cards in the the given set. """ return list(self.cards_coll.find({'set': setname})) def create_card(self, setname, color, text, creator): """Insert a new card with the given properties into the database. Args: setname (str): Name of set the card will belong to. color (str): Color the card will have. text (str): Text that will appear on the card. creator (str): Creator to attribute the card to. Returns: None """ card = { 'set': setname, 'color': color, 'text': text, 'creator': creator, } self.cards_coll.insert_one(card)
from pymongo import MongoClient class Cards: def __init__(self, dbname='cards'): """Instantiate this class. Set up a connection to the given Mongo database. Get to the collection we'll store cards in. Args: dbname (str): Database name. """ self.client = MongoClient() self.db = self.client[dbname] self.cards_coll = self.db['cards'] @property def sets(self): """Return a list of all the card sets in the database. Args: None Returns: list: List of all card sets in the database. """ return self.cards_coll.distinct('set') def retrieve_set(self, setname): """Return a list of all the cards in the given set. Args: setname (str): Name of set. Returns: list: List of all cards in the the given set. """ return list(self.cards_coll.find({'set': setname})) def create_cards(self, cards): """Insert a new card with the given properties into the database. Args: cards: List of dictionaries with set, color, text, and creator keys. Returns: None """ keys = ['set', 'color', 'text', 'creator'] filtered = [ { k: card[k] for k in keys if k in card} for card in cards] self.cards_coll.insert_many(filtered)
Refactor create_card method to take a list of card dictionaries. Rename method accordingly.
Refactor create_card method to take a list of card dictionaries. Rename method accordingly.
Python
isc
wwu-nosql/cards
from pymongo import MongoClient class Cards: def __init__(self, dbname='cards'): """Instantiate this class. Set up a connection to the given Mongo database. Get to the collection we'll store cards in. Args: dbname (str): Database name. """ self.client = MongoClient() self.db = self.client[dbname] self.cards_coll = self.db['cards'] @property def sets(self): """Return a list of all the card sets in the database. Args: None Returns: list: List of all card sets in the database. """ return self.cards_coll.distinct('set') def retrieve_set(self, setname): """Return a list of all the cards in the given set. Args: setname (str): Name of set. Returns: list: List of all cards in the the given set. """ return list(self.cards_coll.find({'set': setname})) - def create_card(self, setname, color, text, creator): + def create_cards(self, cards): """Insert a new card with the given properties into the database. Args: + cards: List of dictionaries with set, color, text, and creator keys. - setname (str): Name of set the card will belong to. - color (str): Color the card will have. - text (str): Text that will appear on the card. - creator (str): Creator to attribute the card to. Returns: None """ - card = { - 'set': setname, - 'color': color, - 'text': text, - 'creator': creator, - } - self.cards_coll.insert_one(card) + keys = ['set', 'color', 'text', 'creator'] + filtered = [ { k: card[k] for k in keys if k in card} for card in cards] + self.cards_coll.insert_many(filtered) +
Refactor create_card method to take a list of card dictionaries. Rename method accordingly.
## Code Before: from pymongo import MongoClient class Cards: def __init__(self, dbname='cards'): """Instantiate this class. Set up a connection to the given Mongo database. Get to the collection we'll store cards in. Args: dbname (str): Database name. """ self.client = MongoClient() self.db = self.client[dbname] self.cards_coll = self.db['cards'] @property def sets(self): """Return a list of all the card sets in the database. Args: None Returns: list: List of all card sets in the database. """ return self.cards_coll.distinct('set') def retrieve_set(self, setname): """Return a list of all the cards in the given set. Args: setname (str): Name of set. Returns: list: List of all cards in the the given set. """ return list(self.cards_coll.find({'set': setname})) def create_card(self, setname, color, text, creator): """Insert a new card with the given properties into the database. Args: setname (str): Name of set the card will belong to. color (str): Color the card will have. text (str): Text that will appear on the card. creator (str): Creator to attribute the card to. Returns: None """ card = { 'set': setname, 'color': color, 'text': text, 'creator': creator, } self.cards_coll.insert_one(card) ## Instruction: Refactor create_card method to take a list of card dictionaries. Rename method accordingly. ## Code After: from pymongo import MongoClient class Cards: def __init__(self, dbname='cards'): """Instantiate this class. Set up a connection to the given Mongo database. Get to the collection we'll store cards in. Args: dbname (str): Database name. """ self.client = MongoClient() self.db = self.client[dbname] self.cards_coll = self.db['cards'] @property def sets(self): """Return a list of all the card sets in the database. Args: None Returns: list: List of all card sets in the database. """ return self.cards_coll.distinct('set') def retrieve_set(self, setname): """Return a list of all the cards in the given set. Args: setname (str): Name of set. 
Returns: list: List of all cards in the the given set. """ return list(self.cards_coll.find({'set': setname})) def create_cards(self, cards): """Insert a new card with the given properties into the database. Args: cards: List of dictionaries with set, color, text, and creator keys. Returns: None """ keys = ['set', 'color', 'text', 'creator'] filtered = [ { k: card[k] for k in keys if k in card} for card in cards] self.cards_coll.insert_many(filtered)
# ... existing code ... def create_cards(self, cards): """Insert a new card with the given properties into the database. # ... modified code ... Args: cards: List of dictionaries with set, color, text, and creator keys. ... """ keys = ['set', 'color', 'text', 'creator'] filtered = [ { k: card[k] for k in keys if k in card} for card in cards] self.cards_coll.insert_many(filtered) # ... rest of the code ...
ef5d3c61acdb7538b4338351b8902802142e03a5
tests/bindings/python/scoring/test-scoring_result.py
tests/bindings/python/scoring/test-scoring_result.py
def test_import(): try: import vistk.pipeline_util.bake except: test_error("Failed to import the bake module") def test_api_calls(): from vistk.scoring import scoring_result result = scoring_result.ScoringResult(1, 1, 1) result.hit_count result.miss_count result.truth_count result.percent_detection() result.precision() result + result def main(testname): if testname == 'import': test_import() elif testname == 'api_calls': test_api_calls() else: test_error("No such test '%s'" % testname) if __name__ == '__main__': import os import sys if not len(sys.argv) == 4: test_error("Expected three arguments") sys.exit(1) testname = sys.argv[1] os.chdir(sys.argv[2]) sys.path.append(sys.argv[3]) from vistk.test.test import * try: main(testname) except BaseException as e: test_error("Unexpected exception: %s" % str(e))
def test_import(): try: import vistk.pipeline_util.bake except: test_error("Failed to import the bake module") def test_api_calls(): from vistk.scoring import scoring_result result = scoring_result.ScoringResult(1, 1, 1) result.true_positives result.false_positives result.total_trues result.total_possible result.percent_detection() result.precision() result.specificity() result + result def main(testname): if testname == 'import': test_import() elif testname == 'api_calls': test_api_calls() else: test_error("No such test '%s'" % testname) if __name__ == '__main__': import os import sys if not len(sys.argv) == 4: test_error("Expected three arguments") sys.exit(1) testname = sys.argv[1] os.chdir(sys.argv[2]) sys.path.append(sys.argv[3]) from vistk.test.test import * try: main(testname) except BaseException as e: test_error("Unexpected exception: %s" % str(e))
Update Python tests for scoring_result
Update Python tests for scoring_result
Python
bsd-3-clause
linus-sherrill/sprokit,mathstuf/sprokit,Kitware/sprokit,linus-sherrill/sprokit,linus-sherrill/sprokit,linus-sherrill/sprokit,mathstuf/sprokit,Kitware/sprokit,Kitware/sprokit,Kitware/sprokit,mathstuf/sprokit,mathstuf/sprokit
def test_import(): try: import vistk.pipeline_util.bake except: test_error("Failed to import the bake module") def test_api_calls(): from vistk.scoring import scoring_result result = scoring_result.ScoringResult(1, 1, 1) - result.hit_count - result.miss_count - result.truth_count + result.true_positives + result.false_positives + result.total_trues + result.total_possible result.percent_detection() result.precision() + result.specificity() result + result def main(testname): if testname == 'import': test_import() elif testname == 'api_calls': test_api_calls() else: test_error("No such test '%s'" % testname) if __name__ == '__main__': import os import sys if not len(sys.argv) == 4: test_error("Expected three arguments") sys.exit(1) testname = sys.argv[1] os.chdir(sys.argv[2]) sys.path.append(sys.argv[3]) from vistk.test.test import * try: main(testname) except BaseException as e: test_error("Unexpected exception: %s" % str(e))
Update Python tests for scoring_result
## Code Before: def test_import(): try: import vistk.pipeline_util.bake except: test_error("Failed to import the bake module") def test_api_calls(): from vistk.scoring import scoring_result result = scoring_result.ScoringResult(1, 1, 1) result.hit_count result.miss_count result.truth_count result.percent_detection() result.precision() result + result def main(testname): if testname == 'import': test_import() elif testname == 'api_calls': test_api_calls() else: test_error("No such test '%s'" % testname) if __name__ == '__main__': import os import sys if not len(sys.argv) == 4: test_error("Expected three arguments") sys.exit(1) testname = sys.argv[1] os.chdir(sys.argv[2]) sys.path.append(sys.argv[3]) from vistk.test.test import * try: main(testname) except BaseException as e: test_error("Unexpected exception: %s" % str(e)) ## Instruction: Update Python tests for scoring_result ## Code After: def test_import(): try: import vistk.pipeline_util.bake except: test_error("Failed to import the bake module") def test_api_calls(): from vistk.scoring import scoring_result result = scoring_result.ScoringResult(1, 1, 1) result.true_positives result.false_positives result.total_trues result.total_possible result.percent_detection() result.precision() result.specificity() result + result def main(testname): if testname == 'import': test_import() elif testname == 'api_calls': test_api_calls() else: test_error("No such test '%s'" % testname) if __name__ == '__main__': import os import sys if not len(sys.argv) == 4: test_error("Expected three arguments") sys.exit(1) testname = sys.argv[1] os.chdir(sys.argv[2]) sys.path.append(sys.argv[3]) from vistk.test.test import * try: main(testname) except BaseException as e: test_error("Unexpected exception: %s" % str(e))
// ... existing code ... result.true_positives result.false_positives result.total_trues result.total_possible result.percent_detection() // ... modified code ... result.precision() result.specificity() // ... rest of the code ...
117e8c717e4555aa9ee015336c36af186c1b0a85
src/ocspdash/web/blueprints/ui.py
src/ocspdash/web/blueprints/ui.py
from flask import Blueprint, current_app, render_template """The OCSPdash homepage UI blueprint.""" # from nacl.encoding import URLSafeBase64Encoder # from nacl.signing import VerifyKey __all__ = [ 'ui', ] ui = Blueprint('ui', __name__) @ui.route('/') def home(): """Show the user the home view.""" payload = current_app.manager.get_payload() return render_template('index.html', payload=payload) # @ui.route('/submit', methods=['POST']) # def submit(): # """Show the submit view.""" # location_id = int(request.headers['authorization']) # # location = current_app.manager.get_location_by_id(location_id) # # if not location.activated: # return abort(403, f'Not activated: {location}') # # key = location.pubkey # # try: # verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder) # payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder) # # except nacl.exceptions.BadSignatureError as e: # return abort(403, f'Bad Signature: {e}') # # decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8')) # current_app.manager.insert_payload(decoded_payload) # # return '', 204
"""The OCSPdash homepage UI blueprint.""" from flask import Blueprint, current_app, render_template __all__ = [ 'ui', ] ui = Blueprint('ui', __name__) @ui.route('/') def home(): """Show the user the home view.""" payload = current_app.manager.get_payload() return render_template('index.html', payload=payload) # @ui.route('/submit', methods=['POST']) # def submit(): # """Show the submit view.""" # location_id = int(request.headers['authorization']) # # location = current_app.manager.get_location_by_id(location_id) # # if not location.activated: # return abort(403, f'Not activated: {location}') # # key = location.pubkey # # try: # verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder) # payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder) # # except nacl.exceptions.BadSignatureError as e: # return abort(403, f'Bad Signature: {e}') # # decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8')) # current_app.manager.insert_payload(decoded_payload) # # return '', 204
Remove unused imports from UI blueprint
Remove unused imports from UI blueprint
Python
mit
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
- from flask import Blueprint, current_app, render_template + """The OCSPdash homepage UI blueprint.""" + from flask import Blueprint, current_app, render_template - # from nacl.encoding import URLSafeBase64Encoder - # from nacl.signing import VerifyKey __all__ = [ 'ui', ] ui = Blueprint('ui', __name__) @ui.route('/') def home(): """Show the user the home view.""" payload = current_app.manager.get_payload() return render_template('index.html', payload=payload) # @ui.route('/submit', methods=['POST']) # def submit(): # """Show the submit view.""" # location_id = int(request.headers['authorization']) # # location = current_app.manager.get_location_by_id(location_id) # # if not location.activated: # return abort(403, f'Not activated: {location}') # # key = location.pubkey # # try: # verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder) # payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder) # # except nacl.exceptions.BadSignatureError as e: # return abort(403, f'Bad Signature: {e}') # # decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8')) # current_app.manager.insert_payload(decoded_payload) # # return '', 204
Remove unused imports from UI blueprint
## Code Before: from flask import Blueprint, current_app, render_template """The OCSPdash homepage UI blueprint.""" # from nacl.encoding import URLSafeBase64Encoder # from nacl.signing import VerifyKey __all__ = [ 'ui', ] ui = Blueprint('ui', __name__) @ui.route('/') def home(): """Show the user the home view.""" payload = current_app.manager.get_payload() return render_template('index.html', payload=payload) # @ui.route('/submit', methods=['POST']) # def submit(): # """Show the submit view.""" # location_id = int(request.headers['authorization']) # # location = current_app.manager.get_location_by_id(location_id) # # if not location.activated: # return abort(403, f'Not activated: {location}') # # key = location.pubkey # # try: # verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder) # payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder) # # except nacl.exceptions.BadSignatureError as e: # return abort(403, f'Bad Signature: {e}') # # decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8')) # current_app.manager.insert_payload(decoded_payload) # # return '', 204 ## Instruction: Remove unused imports from UI blueprint ## Code After: """The OCSPdash homepage UI blueprint.""" from flask import Blueprint, current_app, render_template __all__ = [ 'ui', ] ui = Blueprint('ui', __name__) @ui.route('/') def home(): """Show the user the home view.""" payload = current_app.manager.get_payload() return render_template('index.html', payload=payload) # @ui.route('/submit', methods=['POST']) # def submit(): # """Show the submit view.""" # location_id = int(request.headers['authorization']) # # location = current_app.manager.get_location_by_id(location_id) # # if not location.activated: # return abort(403, f'Not activated: {location}') # # key = location.pubkey # # try: # verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder) # payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder) # # except 
nacl.exceptions.BadSignatureError as e: # return abort(403, f'Bad Signature: {e}') # # decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8')) # current_app.manager.insert_payload(decoded_payload) # # return '', 204
# ... existing code ... """The OCSPdash homepage UI blueprint.""" # ... modified code ... from flask import Blueprint, current_app, render_template # ... rest of the code ...
e0dac0a621cbeed615553e5c3544f9c49de96eb2
metadata/FrostNumberModel/hooks/pre-stage.py
metadata/FrostNumberModel/hooks/pre-stage.py
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir)
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) yaml_dump('_env.yaml', env)
Subtract 1 from model end_year
Subtract 1 from model end_year This matches the behavior of the FrostNumberModel BMI.
Python
mit
csdms/wmt-metadata
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil - from wmt.utils.hook import find_simulation_input_file + from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ - env['end_year'] = long(env['start_year']) + long(env['_run_duration']) + env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) + for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) + yaml_dump('_env.yaml', env) +
Subtract 1 from model end_year
## Code Before: """A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) ## Instruction: Subtract 1 from model end_year ## Code After: """A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) yaml_dump('_env.yaml', env)
// ... existing code ... from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters // ... modified code ... """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' ... assign_parameters(env, file_list) for fname in file_list: ... shutil.copy(src, os.curdir) yaml_dump('_env.yaml', env) // ... rest of the code ...
d3de354717fdb15d6e883f38d87eba4806fd5cc7
wafer/pages/urls.py
wafer/pages/urls.py
from django.conf.urls import patterns, url, include from django.core.urlresolvers import get_script_prefix from django.views.generic import RedirectView from rest_framework import routers from wafer.pages.views import PageViewSet router = routers.DefaultRouter() router.register(r'pages', PageViewSet) urlpatterns = patterns( 'wafer.pages.views', url(r'^api/', include(router.urls)), url('^index(?:\.html)?/?$', RedirectView.as_view( url=get_script_prefix(), permanent=True, query_string=True)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), )
from django.conf.urls import patterns, url, include from rest_framework import routers from wafer.pages.views import PageViewSet router = routers.DefaultRouter() router.register(r'pages', PageViewSet) urlpatterns = patterns( 'wafer.pages.views', url(r'^api/', include(router.urls)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), )
Drop index redirect, no longer needed
Drop index redirect, no longer needed
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
from django.conf.urls import patterns, url, include - from django.core.urlresolvers import get_script_prefix - from django.views.generic import RedirectView from rest_framework import routers from wafer.pages.views import PageViewSet router = routers.DefaultRouter() router.register(r'pages', PageViewSet) urlpatterns = patterns( 'wafer.pages.views', url(r'^api/', include(router.urls)), - url('^index(?:\.html)?/?$', RedirectView.as_view( - url=get_script_prefix(), permanent=True, query_string=True)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), )
Drop index redirect, no longer needed
## Code Before: from django.conf.urls import patterns, url, include from django.core.urlresolvers import get_script_prefix from django.views.generic import RedirectView from rest_framework import routers from wafer.pages.views import PageViewSet router = routers.DefaultRouter() router.register(r'pages', PageViewSet) urlpatterns = patterns( 'wafer.pages.views', url(r'^api/', include(router.urls)), url('^index(?:\.html)?/?$', RedirectView.as_view( url=get_script_prefix(), permanent=True, query_string=True)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), ) ## Instruction: Drop index redirect, no longer needed ## Code After: from django.conf.urls import patterns, url, include from rest_framework import routers from wafer.pages.views import PageViewSet router = routers.DefaultRouter() router.register(r'pages', PageViewSet) urlpatterns = patterns( 'wafer.pages.views', url(r'^api/', include(router.urls)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), )
// ... existing code ... from django.conf.urls import patterns, url, include from rest_framework import routers // ... modified code ... url(r'^api/', include(router.urls)), url(r'^(?:(.+)/)?$', 'slug', name='wafer_page'), // ... rest of the code ...
847a88c579118f8a0d528284ab3ea029ccca7215
git_pre_commit_hook/builtin_plugins/rst_check.py
git_pre_commit_hook/builtin_plugins/rst_check.py
import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
"""Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
Add description to rst plugin
Add description to rst plugin
Python
mit
evvers/git-pre-commit-hook
+ """Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
Add description to rst plugin
## Code Before: import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True ## Instruction: Add description to rst plugin ## Code After: """Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
... """Check that files contains valid ReStructuredText.""" import os ...
e2ffc3d09157a0bfba4a0bcaca98691d99d04d77
Scripted/CIP_/CIP/ui/__init__.py
Scripted/CIP_/CIP/ui/__init__.py
from .CIP_EditorWidget import CIP_EditorWidget from .CIP_EditBox import * from .CaseReportsWidget import * from .PreProcessingWidget import * from .MIPViewerWidget import * from .CollapsibleMultilineText import * from .PdfReporter import * #from ACIL_GetImage.CaseNavigatorWidget import * #from AutoUpdateWidget import AutoUpdateWidget # import os # CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons') # del os
from .CaseReportsWidget import * from .PreProcessingWidget import * from .MIPViewerWidget import * from .CollapsibleMultilineText import * from .PdfReporter import * #from ACIL_GetImage.CaseNavigatorWidget import * #from AutoUpdateWidget import AutoUpdateWidget # import os # CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons') # del os
Make CIP compatible with Slicer 5
ENH: Make CIP compatible with Slicer 5 - remove CIP UI Editor includes to enable CIP loading in Slicer preview
Python
bsd-3-clause
acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP,acil-bwh/SlicerCIP
- from .CIP_EditorWidget import CIP_EditorWidget - from .CIP_EditBox import * from .CaseReportsWidget import * from .PreProcessingWidget import * from .MIPViewerWidget import * from .CollapsibleMultilineText import * from .PdfReporter import * #from ACIL_GetImage.CaseNavigatorWidget import * #from AutoUpdateWidget import AutoUpdateWidget # import os # CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons') # del os
Make CIP compatible with Slicer 5
## Code Before: from .CIP_EditorWidget import CIP_EditorWidget from .CIP_EditBox import * from .CaseReportsWidget import * from .PreProcessingWidget import * from .MIPViewerWidget import * from .CollapsibleMultilineText import * from .PdfReporter import * #from ACIL_GetImage.CaseNavigatorWidget import * #from AutoUpdateWidget import AutoUpdateWidget # import os # CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons') # del os ## Instruction: Make CIP compatible with Slicer 5 ## Code After: from .CaseReportsWidget import * from .PreProcessingWidget import * from .MIPViewerWidget import * from .CollapsibleMultilineText import * from .PdfReporter import * #from ACIL_GetImage.CaseNavigatorWidget import * #from AutoUpdateWidget import AutoUpdateWidget # import os # CIP_ICON_DIR = os.path.realpath(os.path.dirname(os.path.realpath(__file__)) + '/../Resources/Icons') # del os
# ... existing code ... from .CaseReportsWidget import * # ... rest of the code ...
4817784c6e1050034faabb1b3d04382fe8997b41
numpy/_array_api/_constants.py
numpy/_array_api/_constants.py
from ._array_object import ndarray from ._dtypes import float64 import numpy as np e = ndarray._new(np.array(np.e, dtype=float64)) inf = ndarray._new(np.array(np.inf, dtype=float64)) nan = ndarray._new(np.array(np.nan, dtype=float64)) pi = ndarray._new(np.array(np.pi, dtype=float64))
import numpy as np e = np.e inf = np.inf nan = np.nan pi = np.pi
Make the array API constants Python floats
Make the array API constants Python floats
Python
bsd-3-clause
seberg/numpy,numpy/numpy,simongibbons/numpy,charris/numpy,mhvk/numpy,simongibbons/numpy,mattip/numpy,seberg/numpy,pdebuyl/numpy,mattip/numpy,charris/numpy,endolith/numpy,numpy/numpy,anntzer/numpy,jakirkham/numpy,mhvk/numpy,anntzer/numpy,endolith/numpy,seberg/numpy,endolith/numpy,mattip/numpy,simongibbons/numpy,numpy/numpy,seberg/numpy,jakirkham/numpy,charris/numpy,anntzer/numpy,mhvk/numpy,rgommers/numpy,simongibbons/numpy,pdebuyl/numpy,mhvk/numpy,rgommers/numpy,rgommers/numpy,jakirkham/numpy,simongibbons/numpy,anntzer/numpy,mhvk/numpy,charris/numpy,numpy/numpy,rgommers/numpy,jakirkham/numpy,pdebuyl/numpy,mattip/numpy,endolith/numpy,pdebuyl/numpy,jakirkham/numpy
- from ._array_object import ndarray - from ._dtypes import float64 - import numpy as np - e = ndarray._new(np.array(np.e, dtype=float64)) - inf = ndarray._new(np.array(np.inf, dtype=float64)) - nan = ndarray._new(np.array(np.nan, dtype=float64)) - pi = ndarray._new(np.array(np.pi, dtype=float64)) + e = np.e + inf = np.inf + nan = np.nan + pi = np.pi
Make the array API constants Python floats
## Code Before: from ._array_object import ndarray from ._dtypes import float64 import numpy as np e = ndarray._new(np.array(np.e, dtype=float64)) inf = ndarray._new(np.array(np.inf, dtype=float64)) nan = ndarray._new(np.array(np.nan, dtype=float64)) pi = ndarray._new(np.array(np.pi, dtype=float64)) ## Instruction: Make the array API constants Python floats ## Code After: import numpy as np e = np.e inf = np.inf nan = np.nan pi = np.pi
# ... existing code ... import numpy as np # ... modified code ... e = np.e inf = np.inf nan = np.nan pi = np.pi # ... rest of the code ...
5f9a3c62c4117e0e674d33e675c3a54d800dacb6
comics/accounts/models.py
comics/accounts/models.py
import uuid from django.contrib.auth.models import User from django.db import models from django.dispatch import receiver from comics.core.models import Comic @receiver(models.signals.post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) def make_secret_key(): return uuid.uuid4().hex class UserProfile(models.Model): user = models.OneToOneField(User, related_name='comics_profile') secret_key = models.CharField(max_length=32, blank=False, default=make_secret_key, help_text='Secret key for feed and API access') comics = models.ManyToManyField(Comic) class Meta: db_table = 'comics_user_profile' def __unicode__(self): return u'User profile for %s' % self.user def generate_new_secret_key(self): self.secret_key = make_secret_key()
import uuid from django.contrib.auth.models import User from django.db import models from django.dispatch import receiver from comics.core.models import Comic @receiver(models.signals.post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) def make_secret_key(): return uuid.uuid4().hex class UserProfile(models.Model): user = models.OneToOneField(User, related_name='comics_profile') secret_key = models.CharField(max_length=32, blank=False, default=make_secret_key, help_text='Secret key for feed and API access') comics = models.ManyToManyField(Comic, through='Subscription') class Meta: db_table = 'comics_user_profile' def __unicode__(self): return u'User profile for %s' % self.user def generate_new_secret_key(self): self.secret_key = make_secret_key() class Subscription(models.Model): userprofile = models.ForeignKey(UserProfile) comic = models.ForeignKey(Comic) class Meta: db_table = 'comics_user_profile_comics'
Add a M2M table for the subscription relation between users and comics
Add a M2M table for the subscription relation between users and comics
Python
agpl-3.0
jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics
import uuid from django.contrib.auth.models import User from django.db import models from django.dispatch import receiver from comics.core.models import Comic @receiver(models.signals.post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) def make_secret_key(): return uuid.uuid4().hex class UserProfile(models.Model): user = models.OneToOneField(User, related_name='comics_profile') secret_key = models.CharField(max_length=32, blank=False, default=make_secret_key, help_text='Secret key for feed and API access') - comics = models.ManyToManyField(Comic) + comics = models.ManyToManyField(Comic, through='Subscription') class Meta: db_table = 'comics_user_profile' def __unicode__(self): return u'User profile for %s' % self.user def generate_new_secret_key(self): self.secret_key = make_secret_key() + + class Subscription(models.Model): + userprofile = models.ForeignKey(UserProfile) + comic = models.ForeignKey(Comic) + + class Meta: + db_table = 'comics_user_profile_comics' +
Add a M2M table for the subscription relation between users and comics
## Code Before: import uuid from django.contrib.auth.models import User from django.db import models from django.dispatch import receiver from comics.core.models import Comic @receiver(models.signals.post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) def make_secret_key(): return uuid.uuid4().hex class UserProfile(models.Model): user = models.OneToOneField(User, related_name='comics_profile') secret_key = models.CharField(max_length=32, blank=False, default=make_secret_key, help_text='Secret key for feed and API access') comics = models.ManyToManyField(Comic) class Meta: db_table = 'comics_user_profile' def __unicode__(self): return u'User profile for %s' % self.user def generate_new_secret_key(self): self.secret_key = make_secret_key() ## Instruction: Add a M2M table for the subscription relation between users and comics ## Code After: import uuid from django.contrib.auth.models import User from django.db import models from django.dispatch import receiver from comics.core.models import Comic @receiver(models.signals.post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) def make_secret_key(): return uuid.uuid4().hex class UserProfile(models.Model): user = models.OneToOneField(User, related_name='comics_profile') secret_key = models.CharField(max_length=32, blank=False, default=make_secret_key, help_text='Secret key for feed and API access') comics = models.ManyToManyField(Comic, through='Subscription') class Meta: db_table = 'comics_user_profile' def __unicode__(self): return u'User profile for %s' % self.user def generate_new_secret_key(self): self.secret_key = make_secret_key() class Subscription(models.Model): userprofile = models.ForeignKey(UserProfile) comic = models.ForeignKey(Comic) class Meta: db_table = 'comics_user_profile_comics'
# ... existing code ... help_text='Secret key for feed and API access') comics = models.ManyToManyField(Comic, through='Subscription') # ... modified code ... self.secret_key = make_secret_key() class Subscription(models.Model): userprofile = models.ForeignKey(UserProfile) comic = models.ForeignKey(Comic) class Meta: db_table = 'comics_user_profile_comics' # ... rest of the code ...
539fae27f9911b9ad13edc5244ffbd12b1509006
utils.py
utils.py
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): import urllib.request, urllib.error, urllib.parse from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urllib.request.urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
Fix for python2/3 compatibility issue with urllib
Fix for python2/3 compatibility issue with urllib
Python
mit
mattloper/opendr,mattloper/opendr
__all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): - import urllib.request, urllib.error, urllib.parse + try: #python3 + from urllib.request import urlopen + except: #python2 + from urllib2 import urlopen + from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: - contents = urllib.request.urlopen(url).read() + contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
Fix for python2/3 compatibility issue with urllib
## Code Before: __all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): import urllib.request, urllib.error, urllib.parse from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urllib.request.urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents) ## Instruction: Fix for python2/3 compatibility issue with urllib ## Code After: __all__ = ['mstack', 'wget'] def mstack(vs, fs): import chumpy as ch import numpy as np lengths = [v.shape[0] for v in vs] f = np.vstack([fs[i]+np.sum(lengths[:i]).astype(np.uint32) for i in range(len(fs))]) v = ch.vstack(vs) return v, f def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join curdir = split(__file__)[0] print(url) if dest_fname is None: dest_fname = join(curdir, split(url)[1]) try: contents = urlopen(url).read() except: raise Exception('Unable to get url: %s' % (url,)) open(dest_fname, 'w').write(contents)
// ... existing code ... def wget(url, dest_fname=None): try: #python3 from urllib.request import urlopen except: #python2 from urllib2 import urlopen from os.path import split, join // ... modified code ... try: contents = urlopen(url).read() except: // ... rest of the code ...
94796ca0107e6c676e3905675290bbe147169717
hoppy/deploy.py
hoppy/deploy.py
from restkit import Resource from hoppy import api_key class Deploy(Resource): def __init__(self, use_ssl=False): self.api_key = api_key super(Deploy, self).__init__(self.host, follow_redirect=True) def check_configuration(self): if not self.api_key: raise HoptoadError('API Key cannot be blank') def request(self, *args, **kwargs): response = super(Deploy, self).request( api_key=self.api_key, *args, **kwargs) return response.body_string() def base_uri(self, use_ssl=False): base = 'http://hoptoadapp.com/deploys.txt' base = base.replace('http://', 'https://') if use_ssl else base return base def deploy(self, env, **kwargs): """ Optional parameters accepted by Hoptoad are: scm_revision scm_repository local_username """ params = {} params['deploy[rails_env]'] = env for key, value in kwargs: params['deploy[%s]' % key] = value return self.post(**params)
from hoppy.api import HoptoadResource class Deploy(HoptoadResource): def __init__(self, use_ssl=False): from hoppy import api_key self.api_key = api_key super(Deploy, self).__init__(use_ssl) def check_configuration(self): if not self.api_key: raise HoptoadError('API Key cannot be blank') def request(self, *args, **kwargs): response = super(Deploy, self).request( api_key=self.api_key, *args, **kwargs) return response def base_uri(self, use_ssl=False): base = 'http://hoptoadapp.com/deploys.txt' base = base.replace('http://', 'https://') if use_ssl else base return base def deploy(self, env, **kwargs): """ Optional parameters accepted by Hoptoad are: scm_revision scm_repository local_username """ params = {} params['deploy[rails_env]'] = env for key, value in kwargs.iteritems(): params['deploy[%s]' % key] = value return self.post(**params)
Test Deploy resource after reworking.
Test Deploy resource after reworking.
Python
mit
peplin/hoppy
- from restkit import Resource + from hoppy.api import HoptoadResource - from hoppy import api_key - - class Deploy(Resource): + class Deploy(HoptoadResource): def __init__(self, use_ssl=False): + from hoppy import api_key self.api_key = api_key - super(Deploy, self).__init__(self.host, follow_redirect=True) + super(Deploy, self).__init__(use_ssl) def check_configuration(self): if not self.api_key: raise HoptoadError('API Key cannot be blank') def request(self, *args, **kwargs): response = super(Deploy, self).request( api_key=self.api_key, *args, **kwargs) - return response.body_string() + return response def base_uri(self, use_ssl=False): base = 'http://hoptoadapp.com/deploys.txt' base = base.replace('http://', 'https://') if use_ssl else base return base def deploy(self, env, **kwargs): """ Optional parameters accepted by Hoptoad are: scm_revision scm_repository local_username """ params = {} params['deploy[rails_env]'] = env - for key, value in kwargs: + for key, value in kwargs.iteritems(): params['deploy[%s]' % key] = value return self.post(**params)
Test Deploy resource after reworking.
## Code Before: from restkit import Resource from hoppy import api_key class Deploy(Resource): def __init__(self, use_ssl=False): self.api_key = api_key super(Deploy, self).__init__(self.host, follow_redirect=True) def check_configuration(self): if not self.api_key: raise HoptoadError('API Key cannot be blank') def request(self, *args, **kwargs): response = super(Deploy, self).request( api_key=self.api_key, *args, **kwargs) return response.body_string() def base_uri(self, use_ssl=False): base = 'http://hoptoadapp.com/deploys.txt' base = base.replace('http://', 'https://') if use_ssl else base return base def deploy(self, env, **kwargs): """ Optional parameters accepted by Hoptoad are: scm_revision scm_repository local_username """ params = {} params['deploy[rails_env]'] = env for key, value in kwargs: params['deploy[%s]' % key] = value return self.post(**params) ## Instruction: Test Deploy resource after reworking. ## Code After: from hoppy.api import HoptoadResource class Deploy(HoptoadResource): def __init__(self, use_ssl=False): from hoppy import api_key self.api_key = api_key super(Deploy, self).__init__(use_ssl) def check_configuration(self): if not self.api_key: raise HoptoadError('API Key cannot be blank') def request(self, *args, **kwargs): response = super(Deploy, self).request( api_key=self.api_key, *args, **kwargs) return response def base_uri(self, use_ssl=False): base = 'http://hoptoadapp.com/deploys.txt' base = base.replace('http://', 'https://') if use_ssl else base return base def deploy(self, env, **kwargs): """ Optional parameters accepted by Hoptoad are: scm_revision scm_repository local_username """ params = {} params['deploy[rails_env]'] = env for key, value in kwargs.iteritems(): params['deploy[%s]' % key] = value return self.post(**params)
... from hoppy.api import HoptoadResource class Deploy(HoptoadResource): def __init__(self, use_ssl=False): from hoppy import api_key self.api_key = api_key super(Deploy, self).__init__(use_ssl) ... api_key=self.api_key, *args, **kwargs) return response ... params['deploy[rails_env]'] = env for key, value in kwargs.iteritems(): params['deploy[%s]' % key] = value ...
07549339c6b0e4b1c98a11799ca95e90cbf109cd
homedisplay/control_milight/management/commands/listen_433.py
homedisplay/control_milight/management/commands/listen_433.py
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in self.ITEM_MAP: item_name = self.ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in ITEM_MAP: item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variable
Move ITEM_MAP to method variable
Python
bsd-3-clause
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] - if id in self.ITEM_MAP: + if id in ITEM_MAP: - item_name = self.ITEM_MAP[id] + item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variable
## Code Before: from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in self.ITEM_MAP: item_name = self.ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id) ## Instruction: Move ITEM_MAP to method variable ## Code After: from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in ITEM_MAP: item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
# ... existing code ... id = line.split(" ")[1] if id in ITEM_MAP: item_name = ITEM_MAP[id] if item_name in sent_event_map: # ... rest of the code ...
6ff07265feaf40e20fe0fbd23df2747660dd0483
trex/serializers.py
trex/serializers.py
from rest_framework.serializers import ( HyperlinkedModelSerializer, HyperlinkedIdentityField, ) from trex.models.project import Project, Entry class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "id", "name", "description", "active", "created") class ProjectDetailSerializer(HyperlinkedModelSerializer): entries = HyperlinkedIdentityField(view_name="project-entries-list") class Meta: model = Project fields = ("id", "name", "description", "active", "created", "entries") class EntryDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Entry fields = ("date", "duration", "description", "state", "user", "created")
from rest_framework.serializers import (
    HyperlinkedModelSerializer,
    HyperlinkedIdentityField,
)

from trex.models.project import Project, Entry


class ProjectSerializer(HyperlinkedModelSerializer):
    """Compact Project representation for list endpoints."""

    class Meta:
        model = Project
        fields = ("url", "id", "name", "description", "active", "created")


class ProjectDetailSerializer(HyperlinkedModelSerializer):
    """Detailed Project representation, including a link to its entries."""

    # Hyperlink pointing at the nested entry collection of this project.
    entries = HyperlinkedIdentityField(view_name="project-entries-list")

    class Meta:
        model = Project
        fields = ("id", "name", "description", "active", "created", "entries")


class EntryDetailSerializer(HyperlinkedModelSerializer):
    """Detailed Entry representation exposing url, id and tags."""

    class Meta:
        model = Entry
        fields = ("url", "id", "date", "duration", "description", "state",
                  "user_abbr", "user", "created", "tags")
Add url, id and tags to the EntriesSerializer
Add url, id and tags to the EntriesSerializer
Python
mit
bjoernricks/trex,bjoernricks/trex
from rest_framework.serializers import ( HyperlinkedModelSerializer, HyperlinkedIdentityField, ) from trex.models.project import Project, Entry class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "id", "name", "description", "active", "created") class ProjectDetailSerializer(HyperlinkedModelSerializer): entries = HyperlinkedIdentityField(view_name="project-entries-list") class Meta: model = Project fields = ("id", "name", "description", "active", "created", "entries") class EntryDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Entry - fields = ("date", "duration", "description", "state", "user", "created") + fields = ("url", "id", "date", "duration", "description", "state", + "user_abbr", "user", "created", "tags")
Add url, id and tags to the EntriesSerializer
## Code Before: from rest_framework.serializers import ( HyperlinkedModelSerializer, HyperlinkedIdentityField, ) from trex.models.project import Project, Entry class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "id", "name", "description", "active", "created") class ProjectDetailSerializer(HyperlinkedModelSerializer): entries = HyperlinkedIdentityField(view_name="project-entries-list") class Meta: model = Project fields = ("id", "name", "description", "active", "created", "entries") class EntryDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Entry fields = ("date", "duration", "description", "state", "user", "created") ## Instruction: Add url, id and trags to the EntriesSerializer ## Code After: from rest_framework.serializers import ( HyperlinkedModelSerializer, HyperlinkedIdentityField, ) from trex.models.project import Project, Entry class ProjectSerializer(HyperlinkedModelSerializer): class Meta: model = Project fields = ("url", "id", "name", "description", "active", "created") class ProjectDetailSerializer(HyperlinkedModelSerializer): entries = HyperlinkedIdentityField(view_name="project-entries-list") class Meta: model = Project fields = ("id", "name", "description", "active", "created", "entries") class EntryDetailSerializer(HyperlinkedModelSerializer): class Meta: model = Entry fields = ("url", "id", "date", "duration", "description", "state", "user_abbr", "user", "created", "tags")
// ... existing code ... model = Entry fields = ("url", "id", "date", "duration", "description", "state", "user_abbr", "user", "created", "tags") // ... rest of the code ...
7a1b6d1999682ef114f81143a99d0f4d8e1f4af2
transactions_not_entry_line/models/account_invoice.py
transactions_not_entry_line/models/account_invoice.py
from openerp import api, _, models from openerp.exceptions import UserError class AccountInvoice(models.Model): _inherit = 'account.invoice' @api.multi def action_move_create(self): for inv in self: for line in inv.invoice_line_ids: if line.price_unit <= 0: raise UserError(_('At least one of the lines of the \ invoice has price unit zero!' '\n Please make sure \ that all lines have successfully captured the unit price.') ) return super(AccountInvoice, self).action_move_create()
from openerp import api, _, models
from openerp.exceptions import UserError


class AccountInvoice(models.Model):
    """Guard journal-entry creation for invoices with problematic lines."""

    _inherit = 'account.invoice'

    @api.multi
    def action_move_create(self):
        # Product ids treated as "balance" products: an invoice containing
        # any of them skips journal-entry creation entirely.
        balance_product_ids = (975887, 975888, 507890)
        for inv in self:
            for line in inv.invoice_line_ids:
                if line.product_id.id in balance_product_ids:
                    return []
                elif line.price_unit <= 0:
                    # A zero (or negative) unit price means the price was
                    # never captured; refuse to create the accounting move.
                    raise UserError(_('At least one of the lines of the \
invoice has price unit zero!' '\n Please make sure \
that all lines have successfully captured the unit price.')
                    )
        return super(AccountInvoice, self).action_move_create()
Add ids for balance product in transactions_not_entry_line
[FIX] Add ids for balance product in transactions_not_entry_line
Python
agpl-3.0
Gebesa-Dev/Addons-gebesa
from openerp import api, _, models from openerp.exceptions import UserError class AccountInvoice(models.Model): _inherit = 'account.invoice' @api.multi def action_move_create(self): + for inv in self: for line in inv.invoice_line_ids: + if line.product_id.id in (975887, 975888, 507890): + return [] - if line.price_unit <= 0: + elif line.price_unit <= 0: raise UserError(_('At least one of the lines of the \ invoice has price unit zero!' '\n Please make sure \ that all lines have successfully captured the unit price.') ) return super(AccountInvoice, self).action_move_create()
Add ids for balance product in transactions_not_entry_line
## Code Before: from openerp import api, _, models from openerp.exceptions import UserError class AccountInvoice(models.Model): _inherit = 'account.invoice' @api.multi def action_move_create(self): for inv in self: for line in inv.invoice_line_ids: if line.price_unit <= 0: raise UserError(_('At least one of the lines of the \ invoice has price unit zero!' '\n Please make sure \ that all lines have successfully captured the unit price.') ) return super(AccountInvoice, self).action_move_create() ## Instruction: Add ids for balance product in transactions_not_entry_line ## Code After: from openerp import api, _, models from openerp.exceptions import UserError class AccountInvoice(models.Model): _inherit = 'account.invoice' @api.multi def action_move_create(self): for inv in self: for line in inv.invoice_line_ids: if line.product_id.id in (975887, 975888, 507890): return [] elif line.price_unit <= 0: raise UserError(_('At least one of the lines of the \ invoice has price unit zero!' '\n Please make sure \ that all lines have successfully captured the unit price.') ) return super(AccountInvoice, self).action_move_create()
// ... existing code ... def action_move_create(self): for inv in self: // ... modified code ... for line in inv.invoice_line_ids: if line.product_id.id in (975887, 975888, 507890): return [] elif line.price_unit <= 0: raise UserError(_('At least one of the lines of the \ // ... rest of the code ...
2feda27b60874de513224256c553dfee32e1a982
tests/lexer/test_lexer.py
tests/lexer/test_lexer.py
import pytest from tests.infrastructure.test_utils import lexer_single from thinglang.lexer.tokens.indent import LexicalIndent from thinglang.lexer.values.identifier import Identifier from thinglang.lexer.values.inline_text import InlineString UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"' def test_empty_string(): symbols = lexer_single('""', without_end=True) assert len(symbols) == 1 assert isinstance(symbols[0], InlineString) and symbols[0].value == "" def test_whitespace_handling(): assert lexer_single("does start with number a, number b, number c") == \ lexer_single("does start with number a,number b,number c ") def test_indentation_handling(): assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')] @pytest.mark.parametrize('code', UNTERMINATED_GROUPS) def test_group_termination_errors(code): with pytest.raises(ValueError): lexer_single(code)
import pytest

from tests.infrastructure.test_utils import lexer_single
# NOTE(review): LexicalEquals appears unused in this module -- confirm
# before removing the import.
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString

# Inputs whose quote/backtick groups are never terminated.
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'


def test_empty_string():
    """An empty quoted pair lexes to a single InlineString with empty value."""
    symbols = lexer_single('""', without_end=True)
    assert len(symbols) == 1
    assert isinstance(symbols[0], InlineString) and symbols[0].value == ""


def test_whitespace_handling():
    """Whitespace around argument separators does not change the token stream."""
    assert lexer_single("does start with number a, number b, number c") == \
        lexer_single("does start with number a,number b,number c ")


def test_indentation_handling():
    """Each leading tab lexes to one LexicalIndent token."""
    assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]


def test_escaping():
    """Escape sequences and escaped quotes are decoded inside string literals."""
    assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')]
    assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')]


@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
    """Unterminated quote/backtick groups raise ValueError."""
    with pytest.raises(ValueError):
        lexer_single(code)
Add test for string escaping
Add test for string escaping
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
import pytest from tests.infrastructure.test_utils import lexer_single + from thinglang.lexer.operators.comparison import LexicalEquals from thinglang.lexer.tokens.indent import LexicalIndent from thinglang.lexer.values.identifier import Identifier from thinglang.lexer.values.inline_text import InlineString UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"' def test_empty_string(): symbols = lexer_single('""', without_end=True) assert len(symbols) == 1 assert isinstance(symbols[0], InlineString) and symbols[0].value == "" def test_whitespace_handling(): assert lexer_single("does start with number a, number b, number c") == \ lexer_single("does start with number a,number b,number c ") def test_indentation_handling(): assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')] + def test_escaping(): + assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')] + assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')] + + @pytest.mark.parametrize('code', UNTERMINATED_GROUPS) def test_group_termination_errors(code): with pytest.raises(ValueError): lexer_single(code)
Add test for string escaping
## Code Before: import pytest from tests.infrastructure.test_utils import lexer_single from thinglang.lexer.tokens.indent import LexicalIndent from thinglang.lexer.values.identifier import Identifier from thinglang.lexer.values.inline_text import InlineString UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"' def test_empty_string(): symbols = lexer_single('""', without_end=True) assert len(symbols) == 1 assert isinstance(symbols[0], InlineString) and symbols[0].value == "" def test_whitespace_handling(): assert lexer_single("does start with number a, number b, number c") == \ lexer_single("does start with number a,number b,number c ") def test_indentation_handling(): assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')] @pytest.mark.parametrize('code', UNTERMINATED_GROUPS) def test_group_termination_errors(code): with pytest.raises(ValueError): lexer_single(code) ## Instruction: Add test for string escaping ## Code After: import pytest from tests.infrastructure.test_utils import lexer_single from thinglang.lexer.operators.comparison import LexicalEquals from thinglang.lexer.tokens.indent import LexicalIndent from thinglang.lexer.values.identifier import Identifier from thinglang.lexer.values.inline_text import InlineString UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"' def test_empty_string(): symbols = lexer_single('""', without_end=True) assert len(symbols) == 1 assert isinstance(symbols[0], InlineString) and symbols[0].value == "" def test_whitespace_handling(): assert lexer_single("does start with number a, number b, number c") == \ lexer_single("does start with number a,number b,number c ") def test_indentation_handling(): assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')] def test_escaping(): assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == 
[InlineString('\tHello world\nand goodbye!')] assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')] @pytest.mark.parametrize('code', UNTERMINATED_GROUPS) def test_group_termination_errors(code): with pytest.raises(ValueError): lexer_single(code)
... from tests.infrastructure.test_utils import lexer_single from thinglang.lexer.operators.comparison import LexicalEquals from thinglang.lexer.tokens.indent import LexicalIndent ... def test_escaping(): assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')] assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')] @pytest.mark.parametrize('code', UNTERMINATED_GROUPS) ...
7af8ee5ca8a036ae2339187b689507989d43aaa6
elmo/moon_tracker/utils.py
elmo/moon_tracker/utils.py
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) )
def _has_any_scan_perm(user, moon, action):
    """Check the scoped ``eve_sde.*_can_<action>_scans`` permission at the
    system, constellation and region level (in that order)."""
    system = moon.planet.system
    scopes = (
        ('sys', system),
        ('con', system.constellation),
        ('reg', system.constellation.region),
    )
    return any(
        user.has_perm('eve_sde.%s_can_%s_scans' % (prefix, obj_action), obj)
        for prefix, obj in scopes
        for obj_action in (action,)
    )


def user_can_view_scans(user, moon):
    """True when the user may view scans of this moon at any scope.

    Delete permission implies view permission.
    """
    return user_can_delete_scans(user, moon) or _has_any_scan_perm(user, moon, 'view')


def user_can_add_scans(user, moon):
    """True when the user may add scans of this moon at any scope.

    Delete permission implies add permission.
    """
    return user_can_delete_scans(user, moon) or _has_any_scan_perm(user, moon, 'add')


def user_can_delete_scans(user, moon):
    """True when the user may delete scans of this moon at any scope."""
    return _has_any_scan_perm(user, moon, 'delete')
Update the permission helper functions.
Update the permission helper functions.
Python
mit
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or - user.has_perm('eve_sde.can_view_scans', moon.planet.system) or + user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or - user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or + user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or - user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) + user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or - user.has_perm('eve_sde.can_add_scans', moon.planet.system) or + user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or - user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or + user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or - user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) + user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( - user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or + user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or - user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or + user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or - user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) + user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
Update the permission helper functions.
## Code Before: def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_view_scans', moon.planet.system) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.can_add_scans', moon.planet.system) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region) ) ## Instruction: Update the permission helper functions. ## Code After: def user_can_view_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) def user_can_add_scans(user, moon): return ( user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) def user_can_delete_scans(user, moon): return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) )
// ... existing code ... user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region) ) // ... modified code ... user_can_delete_scans(user, moon) or user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region) ) ... return ( user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region) ) // ... rest of the code ...
9d7f2626294fbf25934e7dda4892b7ac13bd5555
fireplace/cards/tgt/warlock.py
fireplace/cards/tgt/warlock.py
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
from ..utils import *


##
# Minions

# Dreadsteed
class AT_019:
    deathrattle = Summon(CONTROLLER, "AT_019")


# Tiny Knight of Evil
class AT_021:
    events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e"))


# Void Crusher
class AT_023:
    inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION)


# Wrathguard
class AT_026:
    events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT))


# Wilfred Fizzlebang
class AT_027:
    # Only cards drawn through the hero power receive the cost buff.
    events = Draw(CONTROLLER).on(
        lambda self, target, card, source:
            source is self.controller.hero.power and Buff(card, "AT_027e")
    )

class AT_027e:
    cost = lambda self, i: 0


##
# Spells

# Fist of Jaraxxus
class AT_022:
    play = Hit(RANDOM_ENEMY_CHARACTER, 4)
    in_hand = Discard(SELF).on(play)


# Demonfuse
class AT_024:
    play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1)


# Dark Bargain
class AT_025:
    play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
Implement more TGT Warlock cards
Implement more TGT Warlock cards
Python
agpl-3.0
liujimj/fireplace,beheh/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,jleclanche/fireplace,Meerkov/fireplace,NightKev/fireplace
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) + # Void Crusher + class AT_023: + inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) + + # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) + # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
Implement more TGT Warlock cards
## Code Before: from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2) ## Instruction: Implement more TGT Warlock cards ## Code After: from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Void Crusher class AT_023: inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
... # Void Crusher class AT_023: inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) # Wrathguard ... events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) ...
6fd4e2e4158c968a095832f3bf669109dc9f1481
mopidy_mpris/__init__.py
mopidy_mpris/__init__.py
import os from mopidy import config, exceptions, ext __version__ = "2.0.0" class Extension(ext.Extension): dist_name = "Mopidy-MPRIS" ext_name = "mpris" version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") return config.read(conf_file) def get_config_schema(self): schema = super().get_config_schema() schema["desktop_file"] = config.Deprecated() schema["bus_type"] = config.String(choices=["session", "system"]) return schema def validate_environment(self): try: import pydbus # noqa except ImportError as e: raise exceptions.ExtensionError("pydbus library not found", e) def setup(self, registry): from .frontend import MprisFrontend registry.add("frontend", MprisFrontend)
import pathlib

from mopidy import config, exceptions, ext

__version__ = "2.0.0"


class Extension(ext.Extension):
    """Mopidy extension entry point for the MPRIS frontend."""

    dist_name = "Mopidy-MPRIS"
    ext_name = "mpris"
    version = __version__

    def get_default_config(self):
        """Read the bundled ``ext.conf`` that lives next to this module."""
        return config.read(pathlib.Path(__file__).parent / "ext.conf")

    def get_config_schema(self):
        """Extend the base schema with the MPRIS-specific options."""
        schema = super().get_config_schema()
        schema["desktop_file"] = config.Deprecated()
        schema["bus_type"] = config.String(choices=["session", "system"])
        return schema

    def validate_environment(self):
        """Fail early with an ExtensionError when pydbus is not installed."""
        try:
            import pydbus  # noqa
        except ImportError as e:
            raise exceptions.ExtensionError("pydbus library not found", e)

    def setup(self, registry):
        """Register the MPRIS frontend with Mopidy."""
        from .frontend import MprisFrontend

        registry.add("frontend", MprisFrontend)
Use pathlib to read ext.conf
Use pathlib to read ext.conf
Python
apache-2.0
mopidy/mopidy-mpris
- import os + import pathlib from mopidy import config, exceptions, ext __version__ = "2.0.0" class Extension(ext.Extension): dist_name = "Mopidy-MPRIS" ext_name = "mpris" version = __version__ def get_default_config(self): + return config.read(pathlib.Path(__file__).parent / "ext.conf") - conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") - return config.read(conf_file) def get_config_schema(self): schema = super().get_config_schema() schema["desktop_file"] = config.Deprecated() schema["bus_type"] = config.String(choices=["session", "system"]) return schema def validate_environment(self): try: import pydbus # noqa except ImportError as e: raise exceptions.ExtensionError("pydbus library not found", e) def setup(self, registry): from .frontend import MprisFrontend registry.add("frontend", MprisFrontend)
Use pathlib to read ext.conf
## Code Before: import os from mopidy import config, exceptions, ext __version__ = "2.0.0" class Extension(ext.Extension): dist_name = "Mopidy-MPRIS" ext_name = "mpris" version = __version__ def get_default_config(self): conf_file = os.path.join(os.path.dirname(__file__), "ext.conf") return config.read(conf_file) def get_config_schema(self): schema = super().get_config_schema() schema["desktop_file"] = config.Deprecated() schema["bus_type"] = config.String(choices=["session", "system"]) return schema def validate_environment(self): try: import pydbus # noqa except ImportError as e: raise exceptions.ExtensionError("pydbus library not found", e) def setup(self, registry): from .frontend import MprisFrontend registry.add("frontend", MprisFrontend) ## Instruction: Use pathlib to read ext.conf ## Code After: import pathlib from mopidy import config, exceptions, ext __version__ = "2.0.0" class Extension(ext.Extension): dist_name = "Mopidy-MPRIS" ext_name = "mpris" version = __version__ def get_default_config(self): return config.read(pathlib.Path(__file__).parent / "ext.conf") def get_config_schema(self): schema = super().get_config_schema() schema["desktop_file"] = config.Deprecated() schema["bus_type"] = config.String(choices=["session", "system"]) return schema def validate_environment(self): try: import pydbus # noqa except ImportError as e: raise exceptions.ExtensionError("pydbus library not found", e) def setup(self, registry): from .frontend import MprisFrontend registry.add("frontend", MprisFrontend)
// ... existing code ... import pathlib // ... modified code ... def get_default_config(self): return config.read(pathlib.Path(__file__).parent / "ext.conf") // ... rest of the code ...
fedf78926b7c135f0f86934975a2b70aa1256884
app/models.py
app/models.py
from datetime import datetime from werkzeug.security import generate_password_hash, check_password_hash from . import db class User(db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(64), nullable=False, unique=True, index=True) username = db.Column(db.String(64), nullable=False, unique=True, index=True) is_admin = db.Column(db.Boolean) password_hash = db.Column(db.String(128)) name = db.Column(db.String(64)) member_since = db.Column(db.DateTime(), default = datetime.utcnow) @property def password(self): raise AttributeError('Password is not a readable attribute') @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password)
from datetime import datetime

# NOTE(review): flask.ext.* was removed in Flask 1.0; modern code imports
# from flask_login directly -- kept as-is to match the project's Flask era.
from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash

from . import db, login_manager


class User(UserMixin, db.Model):
    """Application user account backed by the ``users`` table."""

    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    email = db.Column(db.String(64), nullable=False, unique=True, index=True)
    username = db.Column(db.String(64), nullable=False, unique=True, index=True)
    # True for administrator accounts.
    is_admin = db.Column(db.Boolean)
    # Salted werkzeug hash; the plain password is never stored.
    password_hash = db.Column(db.String(128))
    name = db.Column(db.String(64))
    member_since = db.Column(db.DateTime(), default=datetime.utcnow)

    @property
    def password(self):
        """Write-only attribute: reading the plain password is forbidden."""
        raise AttributeError('Password is not a readable attribute')

    @password.setter
    def password(self, password):
        # Store only a salted hash of the supplied plain-text password.
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Return True when ``password`` matches the stored hash."""
        return check_password_hash(self.password_hash, password)


# Flask-Login callback; defined at module level (not inside the class, where
# it previously sat between the password property getter and its setter and
# became an accidental class attribute).
@login_manager.user_loader
def load_user(user_id):
    """Load a user by primary key for Flask-Login session management."""
    return User.query.get(int(user_id))
Add user_loader function for loading a user
Add user_loader function for loading a user
Python
mit
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
from datetime import datetime + + from flask.ext.login import UserMixin from werkzeug.security import generate_password_hash, check_password_hash - from . import db + from . import db, login_manager - class User(db.Model): + class User(UserMixin, db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(64), nullable=False, unique=True, index=True) username = db.Column(db.String(64), nullable=False, unique=True, index=True) is_admin = db.Column(db.Boolean) password_hash = db.Column(db.String(128)) name = db.Column(db.String(64)) member_since = db.Column(db.DateTime(), default = datetime.utcnow) @property def password(self): raise AttributeError('Password is not a readable attribute') + + @login_manager.user_loader + def load_user(user_id): + return User.query.get(int(user_id)) @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password)
Add user_loader function for loading a user
## Code Before: from datetime import datetime from werkzeug.security import generate_password_hash, check_password_hash from . import db class User(db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(64), nullable=False, unique=True, index=True) username = db.Column(db.String(64), nullable=False, unique=True, index=True) is_admin = db.Column(db.Boolean) password_hash = db.Column(db.String(128)) name = db.Column(db.String(64)) member_since = db.Column(db.DateTime(), default = datetime.utcnow) @property def password(self): raise AttributeError('Password is not a readable attribute') @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password) ## Instruction: Add user_loader function for loading a user ## Code After: from datetime import datetime from flask.ext.login import UserMixin from werkzeug.security import generate_password_hash, check_password_hash from . import db, login_manager class User(UserMixin, db.Model): __tablename__ = 'users' id = db.Column(db.Integer, primary_key=True, autoincrement=True) email = db.Column(db.String(64), nullable=False, unique=True, index=True) username = db.Column(db.String(64), nullable=False, unique=True, index=True) is_admin = db.Column(db.Boolean) password_hash = db.Column(db.String(128)) name = db.Column(db.String(64)) member_since = db.Column(db.DateTime(), default = datetime.utcnow) @property def password(self): raise AttributeError('Password is not a readable attribute') @login_manager.user_loader def load_user(user_id): return User.query.get(int(user_id)) @password.setter def password(self, password): self.password_hash = generate_password_hash(password) def verify_password(self, password): return check_password_hash(self.password_hash, password)
# ... existing code ... from datetime import datetime from flask.ext.login import UserMixin # ... modified code ... from . import db, login_manager class User(UserMixin, db.Model): __tablename__ = 'users' ... raise AttributeError('Password is not a readable attribute') @login_manager.user_loader def load_user(user_id): return User.query.get(int(user_id)) # ... rest of the code ...
13daca3feedd8df8803904a60199a9dfa47dad8d
fuel_test/cobbler/test_single.py
fuel_test/cobbler/test_single.py
import unittest from fuel_test.cobbler.cobbler_test_case import CobblerTestCase from fuel_test.manifest import Manifest from fuel_test.settings import OPENSTACK_SNAPSHOT class SingleTestCase(CobblerTestCase): def test_single(self): Manifest().write_openstack_single_manifest( remote=self.remote(), ci=self.ci(), ) self.validate( self.nodes().controllers, 'puppet agent --test') self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True) if __name__ == '__main__': unittest.main()
import unittest from fuel_test.cobbler.cobbler_test_case import CobblerTestCase from fuel_test.manifest import Manifest from fuel_test.settings import OPENSTACK_SNAPSHOT class SingleTestCase(CobblerTestCase): def test_single(self): Manifest().write_openstack_single_manifest( remote=self.remote(), ci=self.ci(), quantum=False, ) self.validate( self.nodes().controllers, 'puppet agent --test') self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True) if __name__ == '__main__': unittest.main()
Switch off quantum at single node in test
Switch off quantum at single node in test
Python
apache-2.0
huntxu/fuel-library,eayunstack/fuel-library,eayunstack/fuel-library,SmartInfrastructures/fuel-library-dev,SmartInfrastructures/fuel-library-dev,stackforge/fuel-library,huntxu/fuel-library,stackforge/fuel-library,zhaochao/fuel-library,xarses/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,zhaochao/fuel-library,xarses/fuel-library,slystopad/fuel-lib,stackforge/fuel-library,zhaochao/fuel-library,ddepaoli3/fuel-library-dev,Metaswitch/fuel-library,ddepaoli3/fuel-library-dev,slystopad/fuel-lib,stackforge/fuel-library,xarses/fuel-library,eayunstack/fuel-library,Metaswitch/fuel-library,Metaswitch/fuel-library,eayunstack/fuel-library,Metaswitch/fuel-library,zhaochao/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,huntxu/fuel-library,slystopad/fuel-lib,huntxu/fuel-library,slystopad/fuel-lib,zhaochao/fuel-library,SmartInfrastructures/fuel-library-dev,eayunstack/fuel-library,huntxu/fuel-library,ddepaoli3/fuel-library-dev,xarses/fuel-library
import unittest from fuel_test.cobbler.cobbler_test_case import CobblerTestCase from fuel_test.manifest import Manifest from fuel_test.settings import OPENSTACK_SNAPSHOT class SingleTestCase(CobblerTestCase): def test_single(self): Manifest().write_openstack_single_manifest( remote=self.remote(), ci=self.ci(), + quantum=False, ) self.validate( self.nodes().controllers, 'puppet agent --test') self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True) if __name__ == '__main__': unittest.main()
Switch off quantum at single node in test
## Code Before: import unittest from fuel_test.cobbler.cobbler_test_case import CobblerTestCase from fuel_test.manifest import Manifest from fuel_test.settings import OPENSTACK_SNAPSHOT class SingleTestCase(CobblerTestCase): def test_single(self): Manifest().write_openstack_single_manifest( remote=self.remote(), ci=self.ci(), ) self.validate( self.nodes().controllers, 'puppet agent --test') self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True) if __name__ == '__main__': unittest.main() ## Instruction: Switch off quantum at single node in test ## Code After: import unittest from fuel_test.cobbler.cobbler_test_case import CobblerTestCase from fuel_test.manifest import Manifest from fuel_test.settings import OPENSTACK_SNAPSHOT class SingleTestCase(CobblerTestCase): def test_single(self): Manifest().write_openstack_single_manifest( remote=self.remote(), ci=self.ci(), quantum=False, ) self.validate( self.nodes().controllers, 'puppet agent --test') self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True) if __name__ == '__main__': unittest.main()
... ci=self.ci(), quantum=False, ) ...
80347266377f01932fe8277c7a12ce87663b9018
comtypes/messageloop.py
comtypes/messageloop.py
import ctypes from ctypes import WinDLL, byref, WinError from ctypes.wintypes import MSG _user32 = WinDLL("user32") GetMessage = _user32.GetMessageA GetMessage.argtypes = [ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint, ] TranslateMessage = _user32.TranslateMessage DispatchMessage = _user32.DispatchMessageA class _MessageLoop(object): def __init__(self): self._filters = [] def insert_filter(self, obj, index=-1): self._filters.insert(index, obj) def remove_filter(self, obj): self._filters.remove(obj) def run(self): msg = MSG() lpmsg = byref(msg) while 1: ret = GetMessage(lpmsg, 0, 0, 0) if ret == -1: raise WinError() elif ret == 0: return # got WM_QUIT if not self.filter_message(lpmsg): TranslateMessage(lpmsg) DispatchMessage(lpmsg) def filter_message(self, lpmsg): for filter in self._filters: if filter(lpmsg): return True return False _messageloop = _MessageLoop() run = _messageloop.run insert_filter = _messageloop.insert_filter remove_filter = _messageloop.remove_filter __all__ = ["run", "insert_filter", "remove_filter"]
import ctypes from ctypes import WinDLL, byref, WinError from ctypes.wintypes import MSG _user32 = WinDLL("user32") GetMessage = _user32.GetMessageA GetMessage.argtypes = [ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint, ] TranslateMessage = _user32.TranslateMessage DispatchMessage = _user32.DispatchMessageA class _MessageLoop(object): def __init__(self): self._filters = [] def insert_filter(self, obj, index=-1): self._filters.insert(index, obj) def remove_filter(self, obj): self._filters.remove(obj) def run(self): msg = MSG() lpmsg = byref(msg) while 1: ret = GetMessage(lpmsg, 0, 0, 0) if ret == -1: raise WinError() elif ret == 0: return # got WM_QUIT if not self.filter_message(lpmsg): TranslateMessage(lpmsg) DispatchMessage(lpmsg) def filter_message(self, lpmsg): return any(filter(lpmsg) for filter in self._filters) _messageloop = _MessageLoop() run = _messageloop.run insert_filter = _messageloop.insert_filter remove_filter = _messageloop.remove_filter __all__ = ["run", "insert_filter", "remove_filter"]
Use any for concise code
Use any for concise code
Python
mit
denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes
import ctypes from ctypes import WinDLL, byref, WinError from ctypes.wintypes import MSG _user32 = WinDLL("user32") GetMessage = _user32.GetMessageA GetMessage.argtypes = [ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint, ] TranslateMessage = _user32.TranslateMessage DispatchMessage = _user32.DispatchMessageA class _MessageLoop(object): def __init__(self): self._filters = [] def insert_filter(self, obj, index=-1): self._filters.insert(index, obj) def remove_filter(self, obj): self._filters.remove(obj) def run(self): msg = MSG() lpmsg = byref(msg) while 1: ret = GetMessage(lpmsg, 0, 0, 0) if ret == -1: raise WinError() elif ret == 0: return # got WM_QUIT if not self.filter_message(lpmsg): TranslateMessage(lpmsg) DispatchMessage(lpmsg) def filter_message(self, lpmsg): + return any(filter(lpmsg) for filter in self._filters) - for filter in self._filters: - if filter(lpmsg): - return True - return False _messageloop = _MessageLoop() run = _messageloop.run insert_filter = _messageloop.insert_filter remove_filter = _messageloop.remove_filter __all__ = ["run", "insert_filter", "remove_filter"]
Use any for concise code
## Code Before: import ctypes from ctypes import WinDLL, byref, WinError from ctypes.wintypes import MSG _user32 = WinDLL("user32") GetMessage = _user32.GetMessageA GetMessage.argtypes = [ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint, ] TranslateMessage = _user32.TranslateMessage DispatchMessage = _user32.DispatchMessageA class _MessageLoop(object): def __init__(self): self._filters = [] def insert_filter(self, obj, index=-1): self._filters.insert(index, obj) def remove_filter(self, obj): self._filters.remove(obj) def run(self): msg = MSG() lpmsg = byref(msg) while 1: ret = GetMessage(lpmsg, 0, 0, 0) if ret == -1: raise WinError() elif ret == 0: return # got WM_QUIT if not self.filter_message(lpmsg): TranslateMessage(lpmsg) DispatchMessage(lpmsg) def filter_message(self, lpmsg): for filter in self._filters: if filter(lpmsg): return True return False _messageloop = _MessageLoop() run = _messageloop.run insert_filter = _messageloop.insert_filter remove_filter = _messageloop.remove_filter __all__ = ["run", "insert_filter", "remove_filter"] ## Instruction: Use any for concise code ## Code After: import ctypes from ctypes import WinDLL, byref, WinError from ctypes.wintypes import MSG _user32 = WinDLL("user32") GetMessage = _user32.GetMessageA GetMessage.argtypes = [ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_uint, ctypes.c_uint, ] TranslateMessage = _user32.TranslateMessage DispatchMessage = _user32.DispatchMessageA class _MessageLoop(object): def __init__(self): self._filters = [] def insert_filter(self, obj, index=-1): self._filters.insert(index, obj) def remove_filter(self, obj): self._filters.remove(obj) def run(self): msg = MSG() lpmsg = byref(msg) while 1: ret = GetMessage(lpmsg, 0, 0, 0) if ret == -1: raise WinError() elif ret == 0: return # got WM_QUIT if not self.filter_message(lpmsg): TranslateMessage(lpmsg) DispatchMessage(lpmsg) def filter_message(self, lpmsg): return any(filter(lpmsg) for filter in self._filters) _messageloop = 
_MessageLoop() run = _messageloop.run insert_filter = _messageloop.insert_filter remove_filter = _messageloop.remove_filter __all__ = ["run", "insert_filter", "remove_filter"]
// ... existing code ... def filter_message(self, lpmsg): return any(filter(lpmsg) for filter in self._filters) // ... rest of the code ...
2050385a5f5fdcffe333ae17463d6469af0b5cd8
mopidy/__init__.py
mopidy/__init__.py
from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Update Python and Pykka version check error messages
Update Python and Pykka version check error messages
Python
apache-2.0
jmarsik/mopidy,adamcik/mopidy,priestd09/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,tkem/mopidy,bencevans/mopidy,hkariti/mopidy,jcass77/mopidy,pacificIT/mopidy,vrs01/mopidy,ali/mopidy,bencevans/mopidy,mokieyue/mopidy,rawdlite/mopidy,swak/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,woutervanwijk/mopidy,swak/mopidy,swak/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,mopidy/mopidy,bencevans/mopidy,jcass77/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,jodal/mopidy,mopidy/mopidy,ali/mopidy,tkem/mopidy,pacificIT/mopidy,quartz55/mopidy,dbrgn/mopidy,ali/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,rawdlite/mopidy,priestd09/mopidy,jodal/mopidy,priestd09/mopidy,dbrgn/mopidy,hkariti/mopidy,jmarsik/mopidy,mopidy/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,quartz55/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,kingosticks/mopidy,tkem/mopidy,jodal/mopidy,jmarsik/mopidy,diandiankan/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,bacontext/mopidy,ali/mopidy,bencevans/mopidy,hkariti/mopidy,bacontext/mopidy,swak/mopidy,quartz55/mopidy,mokieyue/mopidy,diandiankan/mopidy,adamcik/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,quartz55/mopidy,bacontext/mopidy,vrs01/mopidy,mokieyue/mopidy,diandiankan/mopidy
from __future__ import unicode_literals + import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( - 'Mopidy requires Python >= 2.7, < 3, but found %s' % + 'ERROR: Mopidy requires Python 2.7, but found %s.' % - '.'.join(map(str, sys.version_info[:3]))) + platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( - 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) + 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % + pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Update Python and Pykka version check error messages
## Code Before: from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' ## Instruction: Update Python and Pykka version check error messages ## Code After: from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
# ... existing code ... import platform import sys # ... modified code ... sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) ... sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) # ... rest of the code ...
5c1fad9e6a75ee43d3a3b7bce6c9249cf601b4b9
tendrl/commons/objects/cluster_tendrl_context/__init__.py
tendrl/commons/objects/cluster_tendrl_context/__init__.py
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.node_context.node_id return super(_ClusterTendrlContextEtcd, self).render()
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render()
Write cluster_tendrl_context to proper location
Write cluster_tendrl_context to proper location Currently it is written to clusters/<node-id>/TendrlContext This is fixed in this PR tendrl-bug-id: Tendrl/commons#302 Signed-off-by: nnDarshan <[email protected]>
Python
lgpl-2.1
r0h4n/commons,Tendrl/commons,rishubhjain/commons
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): - self.__name__ = self.__name__ % NS.node_context.node_id + self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render()
Write cluster_tendrl_context to proper location
## Code Before: import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.node_context.node_id return super(_ClusterTendrlContextEtcd, self).render() ## Instruction: Write cluster_tendrl_context to proper location ## Code After: import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 
'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render()
... def render(self): self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render() ...
c987ed375da13f53928157f14528bed0c148eeac
tasks.py
tasks.py
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): asyncio.set_event_loop(cls.loop) try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
Set implicit loop for Python <3.6
Set implicit loop for Python <3.6
Python
apache-2.0
Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): + asyncio.set_event_loop(cls.loop) + try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
Set implicit loop for Python <3.6
## Code Before: import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start() ## Instruction: Set implicit loop for Python <3.6 ## Code After: import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): asyncio.set_event_loop(cls.loop) try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
... def _run(cls): asyncio.set_event_loop(cls.loop) try: ...
5fb365333711f7e999f71d53061ae14c386e575c
src/waldur_core/core/api_groups_mapping.py
src/waldur_core/core/api_groups_mapping.py
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], }
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
Add accounting group to apidocs
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], + 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
## Code Before: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], } ## Instruction: Add accounting group to apidocs ## Code After: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', 
'/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
... ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], } ...
d7001ccab0879e17308bf2dc945b5fd3b726be27
statblock/dice.py
statblock/dice.py
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): base = "%sd%s" % (self.multiplicator, self.number) if self.modifier > 0: return base + ("+%s" % self.modifier) return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
Write the critical multiplier or the range when the damage gets converted into a String
Write the critical multiplier or the range when the damage gets converted into a String
Python
mit
bkittelmann/statblock
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): - return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) + base = "%sd%s" % (self.multiplicator, self.number) + if self.modifier > 0: + return base + ("+%s" % self.modifier) + return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
Write the critical multiplier or the range when the damage gets converted into a String
## Code Before: from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100) ## Instruction: Write the critical multiplier or the range when the damage gets converted into a String ## Code After: from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. 
""" def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): base = "%sd%s" % (self.multiplicator, self.number) if self.modifier > 0: return base + ("+%s" % self.modifier) return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
# ... existing code ... def __repr__(self): base = "%sd%s" % (self.multiplicator, self.number) if self.modifier > 0: return base + ("+%s" % self.modifier) return base # ... rest of the code ...
cc841cc1020ca4df6f303fbb05e497a7c69c92f0
akvo/rsr/migrations/0087_auto_20161110_0920.py
akvo/rsr/migrations/0087_auto_20161110_0920.py
from __future__ import unicode_literals from django.db import migrations def fix_employment_groups(apps, schema_editor): # We can't import the Employment or Group model directly as it may be a # newer version than this migration expects. We use the historical version. Group = apps.get_model("auth", "Group") Employment = apps.get_model("rsr", "Employment") for employment in Employment.objects.filter(group=None): employment.group = Group.objects.get(name='Users') employment.save() class Migration(migrations.Migration): dependencies = [ ('rsr', '0086_auto_20160921_0947'), ] operations = [ migrations.RunPython(fix_employment_groups), ]
from __future__ import unicode_literals from django.db import migrations def fix_employment_groups(apps, schema_editor): # We can't import the Employment or Group model directly as it may be a # newer version than this migration expects. We use the historical version. Group = apps.get_model("auth", "Group") Employment = apps.get_model("rsr", "Employment") for employment in Employment.objects.filter(group=None): try: employment.group = Group.objects.get(name='Users') employment.save() except Exception as e: print(e) class Migration(migrations.Migration): dependencies = [ ('rsr', '0086_auto_20160921_0947'), ] operations = [ migrations.RunPython(fix_employment_groups), ]
Fix broken migration with try-except blocks
Fix broken migration with try-except blocks Duplicate key errors were being caused if an employment similar to the one being created by the migration already existed.
Python
agpl-3.0
akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr
from __future__ import unicode_literals from django.db import migrations def fix_employment_groups(apps, schema_editor): # We can't import the Employment or Group model directly as it may be a # newer version than this migration expects. We use the historical version. Group = apps.get_model("auth", "Group") Employment = apps.get_model("rsr", "Employment") for employment in Employment.objects.filter(group=None): + try: - employment.group = Group.objects.get(name='Users') + employment.group = Group.objects.get(name='Users') - employment.save() + employment.save() + except Exception as e: + print(e) class Migration(migrations.Migration): dependencies = [ ('rsr', '0086_auto_20160921_0947'), ] operations = [ migrations.RunPython(fix_employment_groups), ]
Fix broken migration with try-except blocks
## Code Before: from __future__ import unicode_literals from django.db import migrations def fix_employment_groups(apps, schema_editor): # We can't import the Employment or Group model directly as it may be a # newer version than this migration expects. We use the historical version. Group = apps.get_model("auth", "Group") Employment = apps.get_model("rsr", "Employment") for employment in Employment.objects.filter(group=None): employment.group = Group.objects.get(name='Users') employment.save() class Migration(migrations.Migration): dependencies = [ ('rsr', '0086_auto_20160921_0947'), ] operations = [ migrations.RunPython(fix_employment_groups), ] ## Instruction: Fix broken migration with try-except blocks ## Code After: from __future__ import unicode_literals from django.db import migrations def fix_employment_groups(apps, schema_editor): # We can't import the Employment or Group model directly as it may be a # newer version than this migration expects. We use the historical version. Group = apps.get_model("auth", "Group") Employment = apps.get_model("rsr", "Employment") for employment in Employment.objects.filter(group=None): try: employment.group = Group.objects.get(name='Users') employment.save() except Exception as e: print(e) class Migration(migrations.Migration): dependencies = [ ('rsr', '0086_auto_20160921_0947'), ] operations = [ migrations.RunPython(fix_employment_groups), ]
# ... existing code ... for employment in Employment.objects.filter(group=None): try: employment.group = Group.objects.get(name='Users') employment.save() except Exception as e: print(e) # ... rest of the code ...
1bd287d3f6f7545e47364832a824e7380c6609e8
web/core/api/resources.py
web/core/api/resources.py
import tastypie.resources import tastypie.authentication import django.db.models import web.core.models import web.core.api.authorization class FileResource(tastypie.resources.ModelResource): class Meta: queryset = web.core.models.File.objects.all() allowed_methods = ['get', 'post'] authentication = tastypie.authentication.MultiAuthentication( tastypie.authentication.SessionAuthentication(), tastypie.authentication.ApiKeyAuthentication() ) authorization = web.core.api.authorization.UserObjectsOnlyAuthorization() def hydrate(self, bundle, request=None): bundle.obj.owner = django.db.models.User.objects.get(pk=bundle.request.user.id) return bundle
import tastypie.resources import tastypie.authentication import tastypie.fields import django.contrib.auth.models import web.core.models import web.core.api.authorization class FileResource(tastypie.resources.ModelResource): class Meta: queryset = web.core.models.File.objects.all() allowed_methods = ['get', 'post'] always_return_data = True authentication = tastypie.authentication.MultiAuthentication( tastypie.authentication.SessionAuthentication(), tastypie.authentication.ApiKeyAuthentication() ) authorization = web.core.api.authorization.UserObjectsOnlyAuthorization() def hydrate(self, bundle, request=None): bundle.obj.author = django.contrib.auth.models.User.objects.get(pk=bundle.request.user.id) return bundle def deserialize(self, request, data, format=None): if not format: format = request.META.get('CONTENT_TYPE', 'application/json') if format == 'application/x-www-form-urlencoded': return request.POST if format.startswith('multipart'): data = request.POST.copy() data.update(request.FILES) return data return super(FileResource, self).deserialize(request, data, format)
Allow files to be uploaded through the TastyPie API
Allow files to be uploaded through the TastyPie API
Python
bsd-3-clause
ambientsound/rsync,ambientsound/rsync,ambientsound/rsync,ambientsound/rsync
import tastypie.resources import tastypie.authentication + import tastypie.fields - import django.db.models + import django.contrib.auth.models import web.core.models import web.core.api.authorization class FileResource(tastypie.resources.ModelResource): class Meta: queryset = web.core.models.File.objects.all() allowed_methods = ['get', 'post'] + always_return_data = True authentication = tastypie.authentication.MultiAuthentication( tastypie.authentication.SessionAuthentication(), tastypie.authentication.ApiKeyAuthentication() ) authorization = web.core.api.authorization.UserObjectsOnlyAuthorization() def hydrate(self, bundle, request=None): - bundle.obj.owner = django.db.models.User.objects.get(pk=bundle.request.user.id) + bundle.obj.author = django.contrib.auth.models.User.objects.get(pk=bundle.request.user.id) return bundle + def deserialize(self, request, data, format=None): + if not format: + format = request.META.get('CONTENT_TYPE', 'application/json') + + if format == 'application/x-www-form-urlencoded': + return request.POST + + if format.startswith('multipart'): + data = request.POST.copy() + data.update(request.FILES) + + return data + + return super(FileResource, self).deserialize(request, data, format) +
Allow files to be uploaded through the TastyPie API
## Code Before: import tastypie.resources import tastypie.authentication import django.db.models import web.core.models import web.core.api.authorization class FileResource(tastypie.resources.ModelResource): class Meta: queryset = web.core.models.File.objects.all() allowed_methods = ['get', 'post'] authentication = tastypie.authentication.MultiAuthentication( tastypie.authentication.SessionAuthentication(), tastypie.authentication.ApiKeyAuthentication() ) authorization = web.core.api.authorization.UserObjectsOnlyAuthorization() def hydrate(self, bundle, request=None): bundle.obj.owner = django.db.models.User.objects.get(pk=bundle.request.user.id) return bundle ## Instruction: Allow files to be uploaded through the TastyPie API ## Code After: import tastypie.resources import tastypie.authentication import tastypie.fields import django.contrib.auth.models import web.core.models import web.core.api.authorization class FileResource(tastypie.resources.ModelResource): class Meta: queryset = web.core.models.File.objects.all() allowed_methods = ['get', 'post'] always_return_data = True authentication = tastypie.authentication.MultiAuthentication( tastypie.authentication.SessionAuthentication(), tastypie.authentication.ApiKeyAuthentication() ) authorization = web.core.api.authorization.UserObjectsOnlyAuthorization() def hydrate(self, bundle, request=None): bundle.obj.author = django.contrib.auth.models.User.objects.get(pk=bundle.request.user.id) return bundle def deserialize(self, request, data, format=None): if not format: format = request.META.get('CONTENT_TYPE', 'application/json') if format == 'application/x-www-form-urlencoded': return request.POST if format.startswith('multipart'): data = request.POST.copy() data.update(request.FILES) return data return super(FileResource, self).deserialize(request, data, format)
# ... existing code ... import tastypie.authentication import tastypie.fields import django.contrib.auth.models # ... modified code ... allowed_methods = ['get', 'post'] always_return_data = True authentication = tastypie.authentication.MultiAuthentication( ... def hydrate(self, bundle, request=None): bundle.obj.author = django.contrib.auth.models.User.objects.get(pk=bundle.request.user.id) return bundle def deserialize(self, request, data, format=None): if not format: format = request.META.get('CONTENT_TYPE', 'application/json') if format == 'application/x-www-form-urlencoded': return request.POST if format.startswith('multipart'): data = request.POST.copy() data.update(request.FILES) return data return super(FileResource, self).deserialize(request, data, format) # ... rest of the code ...
56446567f764625e88d8efdbfa2849e0a579d5c4
indra/tests/test_rest_api.py
indra/tests/test_rest_api.py
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
Update REST API address in test
Update REST API address in test
Python
bsd-2-clause
sorgerlab/belpy,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/indra,johnbachman/indra
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' - url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ + url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
Update REST API address in test
## Code Before: import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200 ## Instruction: Update REST API address in test ## Code After: import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
... stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' ...
d407f1bcd95daf4f4bd8dfe8ae3b4b9e68061cb5
cref/sequence/fragment.py
cref/sequence/fragment.py
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ if size > 0: for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
Handle sliding window with size 0
Handle sliding window with size 0
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ + if size > 0: - for i in range(len(sequence) - size + 1): + for i in range(len(sequence) - size + 1): - yield sequence[i: i + size] + yield sequence[i: i + size] -
Handle sliding window with size 0
## Code Before: def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ for i in range(len(sequence) - size + 1): yield sequence[i: i + size] ## Instruction: Handle sliding window with size 0 ## Code After: def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ if size > 0: for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
... """ if size > 0: for i in range(len(sequence) - size + 1): yield sequence[i: i + size] ...
b8c2376368290fa4fef103ba86d4f2ed164a3b7d
numscons/checkers/__init__.py
numscons/checkers/__init__.py
from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK from fft_checkers import CheckFFT from simple_check import NumpyCheckLibAndHeader from perflib import * from fortran import * from perflib_info import write_info import blas_lapack_checkers import fft_checkers import perflib import perflib_info __all__ = blas_lapack_checkers.__all__ __all__ += fft_checkers.__all__ __all__ += perflib.__all__ __all__ += perflib_info.__all__ __all__ += fortran.__all__ __all__ += ['NumpyCheckLibAndHeader']
from numscons.checkers.new.netlib_checkers import \ CheckCblas as CheckCBLAS, \ CheckF77Blas as CheckF77BLAS, \ CheckF77Lapack as CheckF77LAPACK from numscons.checkers.new.common import \ get_perflib_implementation from numscons.checkers.new.common import \ write_configuration_results as write_info from numscons.checkers.simple_check import \ NumpyCheckLibAndHeader from numscons.checkers.fortran import * from numscons.checkers import fortran # Those are for compatibility only def CheckCLAPACK(context, autoadd=1, check_version=0): context.Message("Checking for CLAPACK ... ") context.Result(0) return 0 def IsVeclib(env, interface): return get_perflib_implementation(env, interface.upper()) == 'VECLIB' def IsAccelerate(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE' def IsATLAS(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ATLAS' def GetATLASVersion(env): return '' __all__ = [] __all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK', 'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion'] __all__ += fortran.__all__ __all__ += ['NumpyCheckLibAndHeader']
Use the new framework for checkers.
Use the new framework for checkers.
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
- from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK - from fft_checkers import CheckFFT + from numscons.checkers.new.netlib_checkers import \ + CheckCblas as CheckCBLAS, \ + CheckF77Blas as CheckF77BLAS, \ + CheckF77Lapack as CheckF77LAPACK + from numscons.checkers.new.common import \ + get_perflib_implementation + from numscons.checkers.new.common import \ + write_configuration_results as write_info - from simple_check import NumpyCheckLibAndHeader + from numscons.checkers.simple_check import \ + NumpyCheckLibAndHeader + from numscons.checkers.fortran import * + from numscons.checkers import fortran - from perflib import * - from fortran import * + # Those are for compatibility only + def CheckCLAPACK(context, autoadd=1, check_version=0): + context.Message("Checking for CLAPACK ... ") + context.Result(0) + return 0 - from perflib_info import write_info + def IsVeclib(env, interface): + return get_perflib_implementation(env, interface.upper()) == 'VECLIB' + def IsAccelerate(env, interface): + return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE' - import blas_lapack_checkers - import fft_checkers - import perflib - import perflib_info - __all__ = blas_lapack_checkers.__all__ - __all__ += fft_checkers.__all__ - __all__ += perflib.__all__ - __all__ += perflib_info.__all__ + def IsATLAS(env, interface): + return get_perflib_implementation(env, interface.upper()) == 'ATLAS' + + def GetATLASVersion(env): + return '' + + __all__ = [] + __all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK', + 'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion'] __all__ += fortran.__all__ __all__ += ['NumpyCheckLibAndHeader']
Use the new framework for checkers.
## Code Before: from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK from fft_checkers import CheckFFT from simple_check import NumpyCheckLibAndHeader from perflib import * from fortran import * from perflib_info import write_info import blas_lapack_checkers import fft_checkers import perflib import perflib_info __all__ = blas_lapack_checkers.__all__ __all__ += fft_checkers.__all__ __all__ += perflib.__all__ __all__ += perflib_info.__all__ __all__ += fortran.__all__ __all__ += ['NumpyCheckLibAndHeader'] ## Instruction: Use the new framework for checkers. ## Code After: from numscons.checkers.new.netlib_checkers import \ CheckCblas as CheckCBLAS, \ CheckF77Blas as CheckF77BLAS, \ CheckF77Lapack as CheckF77LAPACK from numscons.checkers.new.common import \ get_perflib_implementation from numscons.checkers.new.common import \ write_configuration_results as write_info from numscons.checkers.simple_check import \ NumpyCheckLibAndHeader from numscons.checkers.fortran import * from numscons.checkers import fortran # Those are for compatibility only def CheckCLAPACK(context, autoadd=1, check_version=0): context.Message("Checking for CLAPACK ... ") context.Result(0) return 0 def IsVeclib(env, interface): return get_perflib_implementation(env, interface.upper()) == 'VECLIB' def IsAccelerate(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE' def IsATLAS(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ATLAS' def GetATLASVersion(env): return '' __all__ = [] __all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK', 'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion'] __all__ += fortran.__all__ __all__ += ['NumpyCheckLibAndHeader']
// ... existing code ... from numscons.checkers.new.netlib_checkers import \ CheckCblas as CheckCBLAS, \ CheckF77Blas as CheckF77BLAS, \ CheckF77Lapack as CheckF77LAPACK from numscons.checkers.new.common import \ get_perflib_implementation from numscons.checkers.new.common import \ write_configuration_results as write_info from numscons.checkers.simple_check import \ NumpyCheckLibAndHeader from numscons.checkers.fortran import * from numscons.checkers import fortran # Those are for compatibility only def CheckCLAPACK(context, autoadd=1, check_version=0): context.Message("Checking for CLAPACK ... ") context.Result(0) return 0 def IsVeclib(env, interface): return get_perflib_implementation(env, interface.upper()) == 'VECLIB' def IsAccelerate(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE' def IsATLAS(env, interface): return get_perflib_implementation(env, interface.upper()) == 'ATLAS' def GetATLASVersion(env): return '' __all__ = [] __all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK', 'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion'] __all__ += fortran.__all__ // ... rest of the code ...
bd193b0fdb7fec412aed24ad8f4c6353372d634f
polling_stations/apps/data_collection/management/commands/import_westberks.py
polling_stations/apps/data_collection/management/commands/import_westberks.py
from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles class Command(BaseShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } def import_polling_stations(self): import_polling_station_shapefiles(self)
from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), }
Refactor West Berks to use new BaseShpShpImporter
Refactor West Berks to use new BaseShpShpImporter
Python
bsd-3-clause
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
- from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles + from data_collection.management.commands import BaseShpShpImporter - class Command(BaseShpImporter): + class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' - + - def district_record_to_dict(self, record): + def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } - def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } - - def import_polling_stations(self): - import_polling_station_shapefiles(self)
Refactor West Berks to use new BaseShpShpImporter
## Code Before: from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles class Command(BaseShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } def import_polling_stations(self): import_polling_station_shapefiles(self) ## Instruction: Refactor West Berks to use new BaseShpShpImporter ## Code After: from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), }
# ... existing code ... from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ # ... modified code ... stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { ... } ... } # ... rest of the code ...
83b290b8d3da89d371ae88057472b838c5433471
cura/Settings/MaterialSettingsVisibilityHandler.py
cura/Settings/MaterialSettingsVisibilityHandler.py
from UM.Settings.Models.SettingVisibilityHandler import SettingVisibilityHandler class MaterialSettingsVisibilityHandler(SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): super().__init__(parent = parent, *args, **kwargs) material_settings = { "default_material_print_temperature", "material_bed_temperature", "material_standby_temperature", "cool_fan_speed", "retraction_amount", "retraction_speed", } self.setVisible(material_settings)
import UM.Settings.Models.SettingVisibilityHandler class MaterialSettingsVisibilityHandler(UM.Settings.Models.SettingVisibilityHandler.SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): super().__init__(parent = parent, *args, **kwargs) material_settings = { "default_material_print_temperature", "material_bed_temperature", "material_standby_temperature", "cool_fan_speed", "retraction_amount", "retraction_speed", } self.setVisible(material_settings)
Use full import path for parent class
Use full import path for parent class Something seems off with the build for some reason. I'm trying to fix it this way.
Python
agpl-3.0
hmflash/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,hmflash/Cura,Curahelper/Cura,fieldOfView/Cura,fieldOfView/Cura,Curahelper/Cura
- from UM.Settings.Models.SettingVisibilityHandler import SettingVisibilityHandler + import UM.Settings.Models.SettingVisibilityHandler - class MaterialSettingsVisibilityHandler(SettingVisibilityHandler): + class MaterialSettingsVisibilityHandler(UM.Settings.Models.SettingVisibilityHandler.SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): super().__init__(parent = parent, *args, **kwargs) material_settings = { "default_material_print_temperature", "material_bed_temperature", "material_standby_temperature", "cool_fan_speed", "retraction_amount", "retraction_speed", } self.setVisible(material_settings)
Use full import path for parent class
## Code Before: from UM.Settings.Models.SettingVisibilityHandler import SettingVisibilityHandler class MaterialSettingsVisibilityHandler(SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): super().__init__(parent = parent, *args, **kwargs) material_settings = { "default_material_print_temperature", "material_bed_temperature", "material_standby_temperature", "cool_fan_speed", "retraction_amount", "retraction_speed", } self.setVisible(material_settings) ## Instruction: Use full import path for parent class ## Code After: import UM.Settings.Models.SettingVisibilityHandler class MaterialSettingsVisibilityHandler(UM.Settings.Models.SettingVisibilityHandler.SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): super().__init__(parent = parent, *args, **kwargs) material_settings = { "default_material_print_temperature", "material_bed_temperature", "material_standby_temperature", "cool_fan_speed", "retraction_amount", "retraction_speed", } self.setVisible(material_settings)
# ... existing code ... import UM.Settings.Models.SettingVisibilityHandler class MaterialSettingsVisibilityHandler(UM.Settings.Models.SettingVisibilityHandler.SettingVisibilityHandler): def __init__(self, parent = None, *args, **kwargs): # ... rest of the code ...
d67099ce7d30e31b98251f7386b33caaa5199a01
censusreporter/config/prod/wsgi.py
censusreporter/config/prod/wsgi.py
import os from django.core.wsgi import get_wsgi_application import newrelic.agent newrelic.agent.initialize('/var/www-data/censusreporter/conf/newrelic.ini') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.prod.settings") application = get_wsgi_application()
import os from django.core.wsgi import get_wsgi_application import newrelic.agent newrelic.agent.initialize(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../../conf/newrelic.ini')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.prod.settings") application = get_wsgi_application()
Correct location of newrelic config
Correct location of newrelic config
Python
mit
sseguku/simplecensusug,Code4SA/censusreporter,Code4SA/censusreporter,Code4SA/censusreporter,sseguku/simplecensusug,4bic/censusreporter,sseguku/simplecensusug,4bic/censusreporter,Code4SA/censusreporter,4bic/censusreporter
import os from django.core.wsgi import get_wsgi_application import newrelic.agent - newrelic.agent.initialize('/var/www-data/censusreporter/conf/newrelic.ini') + newrelic.agent.initialize(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../../conf/newrelic.ini')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.prod.settings") application = get_wsgi_application()
Correct location of newrelic config
## Code Before: import os from django.core.wsgi import get_wsgi_application import newrelic.agent newrelic.agent.initialize('/var/www-data/censusreporter/conf/newrelic.ini') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.prod.settings") application = get_wsgi_application() ## Instruction: Correct location of newrelic config ## Code After: import os from django.core.wsgi import get_wsgi_application import newrelic.agent newrelic.agent.initialize(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../../conf/newrelic.ini')) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.prod.settings") application = get_wsgi_application()
... import newrelic.agent newrelic.agent.initialize(os.path.join(os.path.abspath(os.path.dirname(__file__)), '../../../conf/newrelic.ini')) ...
72fcd8f8ec44bf11fa1ed746de188ee4312150c3
apps/sumo/urls.py
apps/sumo/urls.py
from django.conf import settings from django.conf.urls.defaults import patterns, url, include from django.views.generic.simple import redirect_to from sumo import views services_patterns = patterns('', url('^/monitor$', views.monitor, name='sumo.monitor'), url('^/version$', views.version_check, name='sumo.version'), url('^/error$', views.error, name='sumo.error'), ) urlpatterns = patterns('', url(r'^robots.txt$', views.robots, name='robots.txt'), ('^services', include(services_patterns)), url('^locales$', views.locales, name='sumo.locales'), # Shortcuts: url('^contribute/?$', redirect_to, {'url': '/kb/superheroes-wanted', 'permanent': False}), url(r'^windows7-support(?:\\/)?$', redirect_to, {'url': '/home/?as=u', 'permanent': False}), ) if 'django_qunit' in settings.INSTALLED_APPS: urlpatterns += patterns('', url(r'^qunit/(?P<path>.*)', views.kitsune_qunit), url(r'^_qunit/', include('django_qunit.urls')), )
from django.conf import settings from django.conf.urls.defaults import patterns, url, include from django.views.generic.base import RedirectView from sumo import views services_patterns = patterns('', url('^/monitor$', views.monitor, name='sumo.monitor'), url('^/version$', views.version_check, name='sumo.version'), url('^/error$', views.error, name='sumo.error'), ) urlpatterns = patterns('', url(r'^robots.txt$', views.robots, name='robots.txt'), ('^services', include(services_patterns)), url('^locales$', views.locales, name='sumo.locales'), # Shortcuts: url('^contribute/?$', RedirectView.as_view(url='/kb/superheroes-wanted', permanent=False)), url(r'^windows7-support(?:\\/)?$', RedirectView.as_view(url='/home/?as=u', permanent=False)), ) if 'django_qunit' in settings.INSTALLED_APPS: urlpatterns += patterns('', url(r'^qunit/(?P<path>.*)', views.kitsune_qunit), url(r'^_qunit/', include('django_qunit.urls')), )
Switch to class based generic views.
Switch to class based generic views.
Python
bsd-3-clause
feer56/Kitsune1,iDTLabssl/kitsune,silentbob73/kitsune,YOTOV-LIMITED/kitsune,mozilla/kitsune,rlr/kitsune,anushbmx/kitsune,anushbmx/kitsune,iDTLabssl/kitsune,silentbob73/kitsune,silentbob73/kitsune,iDTLabssl/kitsune,brittanystoroz/kitsune,safwanrahman/kitsune,orvi2014/kitsune,feer56/Kitsune2,turtleloveshoes/kitsune,MikkCZ/kitsune,mozilla/kitsune,brittanystoroz/kitsune,safwanrahman/kitsune,anushbmx/kitsune,silentbob73/kitsune,H1ghT0p/kitsune,NewPresident1/kitsune,MziRintu/kitsune,anushbmx/kitsune,NewPresident1/kitsune,mythmon/kitsune,YOTOV-LIMITED/kitsune,chirilo/kitsune,NewPresident1/kitsune,philipp-sumo/kitsune,turtleloveshoes/kitsune,orvi2014/kitsune,mythmon/kitsune,YOTOV-LIMITED/kitsune,Osmose/kitsune,mozilla/kitsune,rlr/kitsune,MikkCZ/kitsune,H1ghT0p/kitsune,brittanystoroz/kitsune,Osmose/kitsune,feer56/Kitsune1,asdofindia/kitsune,MziRintu/kitsune,safwanrahman/linuxdesh,chirilo/kitsune,NewPresident1/kitsune,feer56/Kitsune2,safwanrahman/kitsune,asdofindia/kitsune,orvi2014/kitsune,rlr/kitsune,H1ghT0p/kitsune,dbbhattacharya/kitsune,asdofindia/kitsune,dbbhattacharya/kitsune,mozilla/kitsune,MziRintu/kitsune,brittanystoroz/kitsune,philipp-sumo/kitsune,safwanrahman/linuxdesh,feer56/Kitsune1,MziRintu/kitsune,MikkCZ/kitsune,iDTLabssl/kitsune,turtleloveshoes/kitsune,safwanrahman/kitsune,turtleloveshoes/kitsune,mythmon/kitsune,chirilo/kitsune,safwanrahman/linuxdesh,feer56/Kitsune2,MikkCZ/kitsune,YOTOV-LIMITED/kitsune,mythmon/kitsune,dbbhattacharya/kitsune,Osmose/kitsune,asdofindia/kitsune,Osmose/kitsune,philipp-sumo/kitsune,rlr/kitsune,orvi2014/kitsune,chirilo/kitsune,feer56/Kitsune2,H1ghT0p/kitsune,dbbhattacharya/kitsune
from django.conf import settings from django.conf.urls.defaults import patterns, url, include - from django.views.generic.simple import redirect_to + from django.views.generic.base import RedirectView from sumo import views services_patterns = patterns('', url('^/monitor$', views.monitor, name='sumo.monitor'), url('^/version$', views.version_check, name='sumo.version'), url('^/error$', views.error, name='sumo.error'), ) urlpatterns = patterns('', url(r'^robots.txt$', views.robots, name='robots.txt'), ('^services', include(services_patterns)), url('^locales$', views.locales, name='sumo.locales'), # Shortcuts: - url('^contribute/?$', redirect_to, - {'url': '/kb/superheroes-wanted', 'permanent': False}), + url('^contribute/?$', RedirectView.as_view(url='/kb/superheroes-wanted', + permanent=False)), - url(r'^windows7-support(?:\\/)?$', redirect_to, + url(r'^windows7-support(?:\\/)?$', - {'url': '/home/?as=u', 'permanent': False}), + RedirectView.as_view(url='/home/?as=u', permanent=False)), ) if 'django_qunit' in settings.INSTALLED_APPS: urlpatterns += patterns('', url(r'^qunit/(?P<path>.*)', views.kitsune_qunit), url(r'^_qunit/', include('django_qunit.urls')), )
Switch to class based generic views.
## Code Before: from django.conf import settings from django.conf.urls.defaults import patterns, url, include from django.views.generic.simple import redirect_to from sumo import views services_patterns = patterns('', url('^/monitor$', views.monitor, name='sumo.monitor'), url('^/version$', views.version_check, name='sumo.version'), url('^/error$', views.error, name='sumo.error'), ) urlpatterns = patterns('', url(r'^robots.txt$', views.robots, name='robots.txt'), ('^services', include(services_patterns)), url('^locales$', views.locales, name='sumo.locales'), # Shortcuts: url('^contribute/?$', redirect_to, {'url': '/kb/superheroes-wanted', 'permanent': False}), url(r'^windows7-support(?:\\/)?$', redirect_to, {'url': '/home/?as=u', 'permanent': False}), ) if 'django_qunit' in settings.INSTALLED_APPS: urlpatterns += patterns('', url(r'^qunit/(?P<path>.*)', views.kitsune_qunit), url(r'^_qunit/', include('django_qunit.urls')), ) ## Instruction: Switch to class based generic views. ## Code After: from django.conf import settings from django.conf.urls.defaults import patterns, url, include from django.views.generic.base import RedirectView from sumo import views services_patterns = patterns('', url('^/monitor$', views.monitor, name='sumo.monitor'), url('^/version$', views.version_check, name='sumo.version'), url('^/error$', views.error, name='sumo.error'), ) urlpatterns = patterns('', url(r'^robots.txt$', views.robots, name='robots.txt'), ('^services', include(services_patterns)), url('^locales$', views.locales, name='sumo.locales'), # Shortcuts: url('^contribute/?$', RedirectView.as_view(url='/kb/superheroes-wanted', permanent=False)), url(r'^windows7-support(?:\\/)?$', RedirectView.as_view(url='/home/?as=u', permanent=False)), ) if 'django_qunit' in settings.INSTALLED_APPS: urlpatterns += patterns('', url(r'^qunit/(?P<path>.*)', views.kitsune_qunit), url(r'^_qunit/', include('django_qunit.urls')), )
... from django.conf.urls.defaults import patterns, url, include from django.views.generic.base import RedirectView ... # Shortcuts: url('^contribute/?$', RedirectView.as_view(url='/kb/superheroes-wanted', permanent=False)), url(r'^windows7-support(?:\\/)?$', RedirectView.as_view(url='/home/?as=u', permanent=False)), ) ...
68b01ea3b6d70a991d3ca0f3e6bff08290caa292
packr/home/views.py
packr/home/views.py
from flask import Blueprint, render_template home = Blueprint('home', __name__) @home.route('/', defaults={'path': ''}) @home.route('/<path:path>') def index(path): print('angularhit') return render_template('index.html')
from flask import Blueprint, render_template home = Blueprint('home', __name__) @home.route('/', defaults={'path': ''}) @home.route('/<path:path>') def index(path): return render_template('index.html')
Remove uneccessary 'angularhit' debug printout.
Remove uneccessary 'angularhit' debug printout.
Python
mit
KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr,KnightHawk3/packr
from flask import Blueprint, render_template home = Blueprint('home', __name__) @home.route('/', defaults={'path': ''}) @home.route('/<path:path>') def index(path): - print('angularhit') return render_template('index.html')
Remove uneccessary 'angularhit' debug printout.
## Code Before: from flask import Blueprint, render_template home = Blueprint('home', __name__) @home.route('/', defaults={'path': ''}) @home.route('/<path:path>') def index(path): print('angularhit') return render_template('index.html') ## Instruction: Remove uneccessary 'angularhit' debug printout. ## Code After: from flask import Blueprint, render_template home = Blueprint('home', __name__) @home.route('/', defaults={'path': ''}) @home.route('/<path:path>') def index(path): return render_template('index.html')
// ... existing code ... def index(path): return render_template('index.html') // ... rest of the code ...
0f55195f4461c80e85d132026a70049b36b8cc0b
sub_numbers_lambda/handle.py
sub_numbers_lambda/handle.py
import json import time def lambda_handler(event,context): number_1 = event['key1'] number_2 = event['key2'] return {"number" : abs(number_1 - number_2)}
import json def lambda_handler(event, context): number_1 = int(event['key1']) number_2 = int(event['key2']) return {"number" : abs(number_1 - number_2)}
Add int() function for casting from string to integer
Add int() function for casting from string to integer
Python
mit
OsamaJBR/teach-me-aws-stepfunctions
import json - import time - def lambda_handler(event,context): + def lambda_handler(event, context): - number_1 = event['key1'] + number_1 = int(event['key1']) - number_2 = event['key2'] + number_2 = int(event['key2']) return {"number" : abs(number_1 - number_2)} +
Add int() function for casting from string to integer
## Code Before: import json import time def lambda_handler(event,context): number_1 = event['key1'] number_2 = event['key2'] return {"number" : abs(number_1 - number_2)} ## Instruction: Add int() function for casting from string to integer ## Code After: import json def lambda_handler(event, context): number_1 = int(event['key1']) number_2 = int(event['key2']) return {"number" : abs(number_1 - number_2)}
... import json def lambda_handler(event, context): number_1 = int(event['key1']) number_2 = int(event['key2']) return {"number" : abs(number_1 - number_2)} ...
c87f75334aa7253ad209bdd4d88c2429723e51d1
admin/desk/urls.py
admin/desk/urls.py
from django.conf.urls import url from admin.desk import views urlpatterns = [ url(r'^$', views.DeskCaseList.as_view(), name='cases'), url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), name='customer'), url(r'^cases/(?P<user_id>[a-z0-9]+)/$', views.DeskCaseList.as_view(), name='user_cases'), ]
from django.conf.urls import url from admin.desk import views urlpatterns = [ url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), name='customer'), url(r'^cases/(?P<user_id>[a-z0-9]+)/$', views.DeskCaseList.as_view(), name='user_cases'), ]
Remove unused bare desk URL
Remove unused bare desk URL
Python
apache-2.0
hmoco/osf.io,aaxelb/osf.io,laurenrevere/osf.io,pattisdr/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,chrisseto/osf.io,cwisecarver/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,acshi/osf.io,crcresearch/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,felliott/osf.io,monikagrabowska/osf.io,chennan47/osf.io,pattisdr/osf.io,acshi/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,acshi/osf.io,cwisecarver/osf.io,saradbowman/osf.io,mluo613/osf.io,mfraezz/osf.io,mattclark/osf.io,brianjgeiger/osf.io,adlius/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,icereval/osf.io,chrisseto/osf.io,caneruguz/osf.io,mluo613/osf.io,erinspace/osf.io,aaxelb/osf.io,mattclark/osf.io,cslzchen/osf.io,acshi/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,caseyrollins/osf.io,leb2dg/osf.io,alexschiller/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,felliott/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,erinspace/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,chennan47/osf.io,crcresearch/osf.io,icereval/osf.io,acshi/osf.io,caneruguz/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,adlius/osf.io,sloria/osf.io,leb2dg/osf.io,mattclark/osf.io,hmoco/osf.io,chrisseto/osf.io,mfraezz/osf.io,chrisseto/osf.io,TomBaxter/osf.io,alexschiller/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,sloria/osf.io,aaxelb/osf.io,Nesiehr/osf.io,felliott/osf.io,pattisdr/osf.io,chennan47/osf.io,mluo613/osf.io,caneruguz/osf.io,binoculars/osf.io,adlius/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,mluo613/osf.io,cslzchen/osf.io,mfraezz/osf.io,hmoco/osf.io,crcresearch/osf.io,hmoco/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,erinspace/osf.io,felliott/osf.io,rdhyee/osf.io,binoculars/osf.io,sloria/osf.io,rdhyee/osf
.io,monikagrabowska/osf.io,rdhyee/osf.io,leb2dg/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,adlius/osf.io
from django.conf.urls import url from admin.desk import views urlpatterns = [ - url(r'^$', views.DeskCaseList.as_view(), name='cases'), url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), name='customer'), url(r'^cases/(?P<user_id>[a-z0-9]+)/$', views.DeskCaseList.as_view(), name='user_cases'), ]
Remove unused bare desk URL
## Code Before: from django.conf.urls import url from admin.desk import views urlpatterns = [ url(r'^$', views.DeskCaseList.as_view(), name='cases'), url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), name='customer'), url(r'^cases/(?P<user_id>[a-z0-9]+)/$', views.DeskCaseList.as_view(), name='user_cases'), ] ## Instruction: Remove unused bare desk URL ## Code After: from django.conf.urls import url from admin.desk import views urlpatterns = [ url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), name='customer'), url(r'^cases/(?P<user_id>[a-z0-9]+)/$', views.DeskCaseList.as_view(), name='user_cases'), ]
... urlpatterns = [ url(r'^customer/(?P<user_id>[a-z0-9]+)/$', views.DeskCustomer.as_view(), ...
1e6b0b6f53a4508c3e4218345b2ee57d48fbc8d1
flask_app.py
flask_app.py
from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return main.list_restaurants() @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return data @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
import json from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
Return str instead of dict.
Return str instead of dict.
Python
bsd-3-clause
talavis/kimenu
+ import json + from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): - return main.list_restaurants() + return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) - return data + return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
Return str instead of dict.
## Code Before: from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return main.list_restaurants() @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return data @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu() ## Instruction: Return str instead of dict. ## Code After: import json from flask import abort from flask import Flask from flask_caching import Cache import main app = Flask(__name__) cache = Cache(app, config={'CACHE_TYPE': 'simple'}) @app.route('/') def display_available(): content = ('<html>' + '<head>' + '<title>Restaurant Menu Parser</title>' + '</head>' + '<body>' + '<p><a href="ki">Campus Solna (KI)</a></p>' + '<p><a href="uu">Campus Uppsala (BMC)</a></p>' + '</body>' + '</html>') return content @app.route('/api/restaurants') @cache.cached(timeout=3600) def api_list_restaurants(): return json.dumps(main.list_restaurants()) @app.route('/api/restaurant/<name>') @cache.cached(timeout=3600) def api_get_restaurant(name): data = main.get_restaurant(name) if not data: abort(404) return json.dumps(data) @app.route('/ki') @cache.cached(timeout=3600) def make_menu_ki(): return main.gen_ki_menu() @app.route('/uu') @cache.cached(timeout=3600) def make_menu_uu(): return main.gen_uu_menu()
// ... existing code ... import json from flask import abort // ... modified code ... def api_list_restaurants(): return json.dumps(main.list_restaurants()) ... abort(404) return json.dumps(data) // ... rest of the code ...
874ead2ed9de86eea20c4a67ce7b53cb2766c09e
erpnext/patches/v5_0/link_warehouse_with_account.py
erpnext/patches/v5_0/link_warehouse_with_account.py
from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
from __future__ import unicode_literals import frappe def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
Update warehouse as per master_name if master_name exists
Update warehouse as per master_name if master_name exists
Python
agpl-3.0
indictranstech/fbd_erpnext,gangadharkadam/saloon_erp_install,mbauskar/helpdesk-erpnext,gmarke/erpnext,Tejal011089/paypal_erpnext,Tejal011089/trufil-erpnext,treejames/erpnext,indictranstech/reciphergroup-erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,gangadharkadam/vlinkerp,hatwar/buyback-erpnext,shft117/SteckerApp,Drooids/erpnext,treejames/erpnext,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,gmarke/erpnext,shft117/SteckerApp,mbauskar/alec_frappe5_erpnext,indictranstech/reciphergroup-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/fbd_erpnext,Tejal011089/fbd_erpnext,sheafferusa/erpnext,mbauskar/alec_frappe5_erpnext,fuhongliang/erpnext,geekroot/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,Tejal011089/osmosis_erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/contributionerp,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,indictranstech/fbd_erpnext,SPKian/Testing2,hanselke/erpnext-1,sheafferusa/erpnext,hatwar/Das_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-erpnext,anandpdoshi/erpnext,hatwar/buyback-erpnext,Tejal011089/osmosis_erpnext,susuchina/ERPNEXT,gangadharkadam/vlinkerp,mbauskar/helpdesk-erpnext,indictranstech/tele-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,tmimori/erpnext,Aptitudetech/ERPNext,netfirms/erpnext,gangadharkadam/contributionerp,netfirms/erpnext,rohitwaghchaure/GenieManager-erpnext,ShashaQin/erpnext,pombredanne/erpnext,SPKian/Testing,hanselke/erpnext-1,hernad/erpnext,mbauskar/sapphire-erpnext,hernad/erpnext,mahabuber/erpnext,anandpdoshi/erpnext,Tejal011089/osmosis_erpnext,hanselke/erpnext-1,susuchina/ERPNEXT,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,MartinEnder/erpnext-de,Tejal011089/huntercamp_erpnext,ThiagoGarciaAlves/erpnext,shft117/SteckerApp,rohitwaghchaure/GenieManager-erpnext,SPKian/Testing2,shitolepriya/test-erp,saurabh6790/test-erp,Drooids/erpnext,njmube/erpnext,pombredanne/erpnext,gsnbng/erpnext,mbauskar/o
mnitech-erpnext,gangadharkadam/v6_erp,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp_install,tmimori/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/paypal_erpnext,indictranstech/erpnext,njmube/erpnext,mbauskar/Das_Erpnext,ThiagoGarciaAlves/erpnext,hatwar/Das_erpnext,gangadhar-kadam/helpdesk-erpnext,hernad/erpnext,rohitwaghchaure/GenieManager-erpnext,Drooids/erpnext,indictranstech/reciphergroup-erpnext,shitolepriya/test-erp,fuhongliang/erpnext,dieface/erpnext,indictranstech/osmosis-erpnext,gangadharkadam/contributionerp,Tejal011089/huntercamp_erpnext,mahabuber/erpnext,mbauskar/Das_Erpnext,gmarke/erpnext,indictranstech/tele-erpnext,saurabh6790/test-erp,pombredanne/erpnext,Suninus/erpnext,ShashaQin/erpnext,sheafferusa/erpnext,treejames/erpnext,SPKian/Testing,fuhongliang/erpnext,indictranstech/fbd_erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,MartinEnder/erpnext-de,Suninus/erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/trufil-erpnext,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/erpnext,rohitwaghchaure/erpnext-receipher,mbauskar/helpdesk-erpnext,indictranstech/biggift-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,gmarke/erpnext,gsnbng/erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,geekroot/erpnext,susuchina/ERPNEXT,netfirms/erpnext,dieface/erpnext,SPKian/Testing,indictranstech/fbd_erpnext,treejames/erpnext,tmimori/erpnext,gangadharkadam/saloon_erp,indictranstech/tele-erpnext,hatwar/Das_erpnext,aruizramon/alec_erpnext,mbauskar/sapphire-erpnext,ThiagoGarciaAlves/erpnext,mbauskar/omnitech-erpnext,Tejal011089/osmosis_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/osmosis-erpnext,fuhongliang/erpnext,gangadharkadam/v6_erp,gangadharkadam/vlinkerp,aruizramon/alec_erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,sheafferusa/erpnext,gangadhar-kadam/helpdesk-erpnext,Tejal011089/trufil-erpnext,indictranstech/trufil-erpnext,anandpdosh
i/erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/helpdesk-erpnext,netfirms/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,dieface/erpnext,aruizramon/alec_erpnext,shft117/SteckerApp,sagar30051991/ozsmart-erp,gangadharkadam/vlinkerp,saurabh6790/test-erp,Drooids/erpnext,indictranstech/biggift-erpnext,SPKian/Testing2,Suninus/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,Tejal011089/fbd_erpnext,ShashaQin/erpnext,shitolepriya/test-erp,njmube/erpnext,MartinEnder/erpnext-de,Tejal011089/paypal_erpnext,aruizramon/alec_erpnext,tmimori/erpnext,Tejal011089/fbd_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,sagar30051991/ozsmart-erp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/erpnext-receipher,meisterkleister/erpnext,hanselke/erpnext-1,gangadharkadam/saloon_erp,sagar30051991/ozsmart-erp,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,geekroot/erpnext,njmube/erpnext,meisterkleister/erpnext,hatwar/Das_erpnext,hernad/erpnext,indictranstech/erpnext,gsnbng/erpnext,gangadharkadam/contributionerp,indictranstech/biggift-erpnext,meisterkleister/erpnext,gangadharkadam/v6_erp,shitolepriya/test-erp,geekroot/erpnext,Tejal011089/paypal_erpnext,dieface/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher
from __future__ import unicode_literals import frappe def execute(): + if "master_name" in frappe.db.get_table_columns("Account"): - frappe.db.sql("""update tabAccount set warehouse=master_name + frappe.db.sql("""update tabAccount set warehouse=master_name - where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""") + where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
Update warehouse as per master_name if master_name exists
## Code Before: from __future__ import unicode_literals import frappe def execute(): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""") ## Instruction: Update warehouse as per master_name if master_name exists ## Code After: from __future__ import unicode_literals import frappe def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
// ... existing code ... def execute(): if "master_name" in frappe.db.get_table_columns("Account"): frappe.db.sql("""update tabAccount set warehouse=master_name where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""") // ... rest of the code ...
c8a7a53f09f72d9dbe44b1bcb5b85c8ee5ba2c2c
services/migrations/0012_unit_data_source.py
services/migrations/0012_unit_data_source.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20, default='tprek'), preserve_default=False ), ]
Add default to data_source migration.
Add default to data_source migration.
Python
agpl-3.0
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', - field=models.CharField(null=True, max_length=20), + field=models.CharField(null=True, max_length=20, default='tprek'), + preserve_default=False ), ]
Add default to data_source migration.
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20), ), ] ## Instruction: Add default to data_source migration. ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20, default='tprek'), preserve_default=False ), ]
... name='data_source', field=models.CharField(null=True, max_length=20, default='tprek'), preserve_default=False ), ...
9185d882dc5fc7131b90d3b93dff8b6603538a3d
app/cogs/twitch_emotes.py
app/cogs/twitch_emotes.py
from io import BytesIO import requests from discord.ext import commands from discord.ext.commands import Bot TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' class TwitchEmotes: def __init__(self, bot: Bot): self.bot = bot r = requests.get(TWITCH_EMOTES_API) emote_data = r.json() emote_template = emote_data['template']['small'] emote_ids = {name: info['image_id'] for name, info in emote_data['emotes'].items()} emote_cache = {} @bot.listen('on_message') async def respond(message): if message.author == bot.user: return text = message.content if text in emote_ids: if text not in emote_cache: url = emote_template.replace('{image_id}', str(emote_ids[text])) emote_img = requests.get(url).content emote_cache[text] = emote_img data = BytesIO(emote_cache[text]) filename = '%s.png' % text await bot.send_file(message.channel, data, filename=filename) def setup(bot: Bot): bot.add_cog(TwitchEmotes(bot))
from io import BytesIO import logging import requests from discord.ext import commands from discord.ext.commands import Bot TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' logger = logging.getLogger(__name__) class TwitchEmotes: def __init__(self, bot: Bot): self.bot = bot r = requests.get(TWITCH_EMOTES_API) emote_data = r.json() emote_template = emote_data['template']['small'] emote_ids = {name: info['image_id'] for name, info in emote_data['emotes'].items()} emote_cache = {} logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids)) logger.info('Using template: %s' % emote_template) @bot.listen('on_message') async def respond(message): if message.author == bot.user: return text = message.content if text in emote_ids: if text not in emote_cache: url = emote_template.replace('{image_id}', str(emote_ids[text])) logger.info('Fetching emote %s from %s' % (text, url)) emote_img = requests.get(url).content emote_cache[text] = emote_img data = BytesIO(emote_cache[text]) filename = '%s.png' % text await bot.send_file(message.channel, data, filename=filename) def setup(bot: Bot): bot.add_cog(TwitchEmotes(bot))
Add logging to Twitch emotes module
Add logging to Twitch emotes module
Python
mit
andrewlin16/duckbot,andrewlin16/duckbot
from io import BytesIO + import logging import requests from discord.ext import commands from discord.ext.commands import Bot TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' + + + logger = logging.getLogger(__name__) class TwitchEmotes: def __init__(self, bot: Bot): self.bot = bot r = requests.get(TWITCH_EMOTES_API) emote_data = r.json() emote_template = emote_data['template']['small'] emote_ids = {name: info['image_id'] for name, info in emote_data['emotes'].items()} emote_cache = {} + logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids)) + logger.info('Using template: %s' % emote_template) @bot.listen('on_message') async def respond(message): if message.author == bot.user: return text = message.content if text in emote_ids: if text not in emote_cache: url = emote_template.replace('{image_id}', str(emote_ids[text])) + logger.info('Fetching emote %s from %s' % (text, url)) emote_img = requests.get(url).content emote_cache[text] = emote_img data = BytesIO(emote_cache[text]) filename = '%s.png' % text await bot.send_file(message.channel, data, filename=filename) def setup(bot: Bot): bot.add_cog(TwitchEmotes(bot))
Add logging to Twitch emotes module
## Code Before: from io import BytesIO import requests from discord.ext import commands from discord.ext.commands import Bot TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' class TwitchEmotes: def __init__(self, bot: Bot): self.bot = bot r = requests.get(TWITCH_EMOTES_API) emote_data = r.json() emote_template = emote_data['template']['small'] emote_ids = {name: info['image_id'] for name, info in emote_data['emotes'].items()} emote_cache = {} @bot.listen('on_message') async def respond(message): if message.author == bot.user: return text = message.content if text in emote_ids: if text not in emote_cache: url = emote_template.replace('{image_id}', str(emote_ids[text])) emote_img = requests.get(url).content emote_cache[text] = emote_img data = BytesIO(emote_cache[text]) filename = '%s.png' % text await bot.send_file(message.channel, data, filename=filename) def setup(bot: Bot): bot.add_cog(TwitchEmotes(bot)) ## Instruction: Add logging to Twitch emotes module ## Code After: from io import BytesIO import logging import requests from discord.ext import commands from discord.ext.commands import Bot TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' logger = logging.getLogger(__name__) class TwitchEmotes: def __init__(self, bot: Bot): self.bot = bot r = requests.get(TWITCH_EMOTES_API) emote_data = r.json() emote_template = emote_data['template']['small'] emote_ids = {name: info['image_id'] for name, info in emote_data['emotes'].items()} emote_cache = {} logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids)) logger.info('Using template: %s' % emote_template) @bot.listen('on_message') async def respond(message): if message.author == bot.user: return text = message.content if text in emote_ids: if text not in emote_cache: url = emote_template.replace('{image_id}', str(emote_ids[text])) logger.info('Fetching emote %s from %s' % (text, url)) emote_img = requests.get(url).content emote_cache[text] = emote_img data = 
BytesIO(emote_cache[text]) filename = '%s.png' % text await bot.send_file(message.channel, data, filename=filename) def setup(bot: Bot): bot.add_cog(TwitchEmotes(bot))
# ... existing code ... from io import BytesIO import logging import requests # ... modified code ... TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json' logger = logging.getLogger(__name__) ... emote_cache = {} logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids)) logger.info('Using template: %s' % emote_template) ... str(emote_ids[text])) logger.info('Fetching emote %s from %s' % (text, url)) # ... rest of the code ...
0983361e6fba5812416d8fb5b695f6b3034bc927
registration/management/commands/cleanupregistration.py
registration/management/commands/cleanupregistration.py
from django.core.management.base import NoArgsCommand from ...models import RegistrationProfile class Command(NoArgsCommand): help = "Delete expired user registrations from the database" def handle_noargs(self, **options): RegistrationProfile.objects.delete_expired_users()
from django.core.management.base import BaseCommand from ...models import RegistrationProfile class Command(BaseCommand): help = "Delete expired user registrations from the database" def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
Fix deprecated class NoArgsCommand class.
Fix deprecated class NoArgsCommand class. Solve the warning RemovedInDjango110Warning: NoArgsCommand class is deprecated and will be removed in Django 1.10. Use BaseCommand instead, which takes no arguments by default.
Python
bsd-3-clause
sergafts/django-registration,timgraham/django-registration,sergafts/django-registration,pando85/django-registration,pando85/django-registration,allo-/django-registration,allo-/django-registration,timgraham/django-registration
- from django.core.management.base import NoArgsCommand + from django.core.management.base import BaseCommand from ...models import RegistrationProfile - class Command(NoArgsCommand): + class Command(BaseCommand): help = "Delete expired user registrations from the database" - def handle_noargs(self, **options): + def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
Fix deprecated class NoArgsCommand class.
## Code Before: from django.core.management.base import NoArgsCommand from ...models import RegistrationProfile class Command(NoArgsCommand): help = "Delete expired user registrations from the database" def handle_noargs(self, **options): RegistrationProfile.objects.delete_expired_users() ## Instruction: Fix deprecated class NoArgsCommand class. ## Code After: from django.core.management.base import BaseCommand from ...models import RegistrationProfile class Command(BaseCommand): help = "Delete expired user registrations from the database" def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
... from django.core.management.base import BaseCommand ... class Command(BaseCommand): help = "Delete expired user registrations from the database" ... def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users() ...
8cc88e1f6e09e91f2ffc5bbf43b58b2d129a12c9
bnc.py
bnc.py
import nltk.corpus.reader.bnc
import nltk.corpus.reader.bnc import time start_time = time.perf_counter() BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is time_taken = time.perf_counter() - start_time print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n')
Load BNC into memory and time process.
Load BNC into memory and time process.
Python
mit
albertomh/ug-dissertation
import nltk.corpus.reader.bnc + import time + + start_time = time.perf_counter() + BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', + fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') + lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is + time_taken = time.perf_counter() - start_time + print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n') +
Load BNC into memory and time process.
## Code Before: import nltk.corpus.reader.bnc ## Instruction: Load BNC into memory and time process. ## Code After: import nltk.corpus.reader.bnc import time start_time = time.perf_counter() BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is time_taken = time.perf_counter() - start_time print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n')
// ... existing code ... import nltk.corpus.reader.bnc import time start_time = time.perf_counter() BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is time_taken = time.perf_counter() - start_time print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n') // ... rest of the code ...
b973a1686f269044e670704b56c07ca79336c29c
mythril/laser/ethereum/strategy/basic.py
mythril/laser/ethereum/strategy/basic.py
class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
Add documentation and fix pop
Add documentation and fix pop
Python
mit
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
+ + class DepthFirstSearchStrategy: - + """ + Implements a depth first search strategy + I.E. Follow one path to a leaf, and then continue to the next one + """ - def __init__(self, content, max_depth): + def __init__(self, work_list, max_depth): - self.content = content + self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): + """ Picks the next state to execute """ try: + # This strategies assumes that new states are appended at the end of the work_list + # By taking the last element we effectively pick the "newest" states, which amounts to dfs - global_state = self.content.pop(0) + global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() +
Add documentation and fix pop
## Code Before: class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() ## Instruction: Add documentation and fix pop ## Code After: class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
// ... existing code ... class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth // ... modified code ... def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: ... raise StopIteration() // ... rest of the code ...
5da928fd9b08aeb0028b71535413159da18393b4
comics/sets/forms.py
comics/sets/forms.py
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.all(), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
Python
agpl-3.0
datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,datagutten/comics
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( - Comic.objects.all(), + Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
## Code Before: import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.all(), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set ## Instruction: Exclude inactive comics from sets editing, effectively throwing them out of the set when saved ## Code After: import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) 
hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
... comics = forms.ModelMultipleChoiceField( Comic.objects.filter(active=True), required=False, ...
fe974197217eff350f1dc0bc5687c83066d6dd34
kaggle_tools/features_engineering/dates_engineering.py
kaggle_tools/features_engineering/dates_engineering.py
import pandas as pd def date_features(input_df, datetime_column='tms_gmt'): """ Given a datetime column, extracts useful date information (minute, hour, dow...) """ df = input_df.copy() return (df.set_index(time_column) .assign(minute=lambda df: df.index.minute, hour=lambda df: df.index.hour, day=lambda df: df.index.day, dow=lambda df: df.index.dayofweek, month=lambda df: df.index.month, week=lambda df: df.index.week, woy=lambda df: df.index.weekofyear, year=lambda df: df.index.year))
import pandas as pd import pytz def date_features(input_df, datetime_column='tms_gmt'): """ Given a datetime column, extracts useful date information (minute, hour, dow...) """ df = input_df.copy() return (df.set_index(time_column) .assign(minute=lambda df: df.index.minute, hour=lambda df: df.index.hour, day=lambda df: df.index.day, dow=lambda df: df.index.dayofweek, month=lambda df: df.index.month, week=lambda df: df.index.week, woy=lambda df: df.index.weekofyear, year=lambda df: df.index.year)) def localize_datetime(input_df, timezone='Europe/Paris', datetime_column='tms_gmt'): """ Convert datetime column from UTC to another timezone. """ tmz = pytz.timezone(timezone) df = input_df.copy() return (df.set_index(datetime_column) .tz_localize(pytz.utc) #  UTC time .tz_convert(tmz)) # Timezone time
Add a datetime localization function
Add a datetime localization function
Python
mit
yassineAlouini/kaggle-tools,yassineAlouini/kaggle-tools
import pandas as pd + import pytz def date_features(input_df, datetime_column='tms_gmt'): """ Given a datetime column, extracts useful date information (minute, hour, dow...) """ df = input_df.copy() return (df.set_index(time_column) .assign(minute=lambda df: df.index.minute, hour=lambda df: df.index.hour, day=lambda df: df.index.day, dow=lambda df: df.index.dayofweek, month=lambda df: df.index.month, week=lambda df: df.index.week, woy=lambda df: df.index.weekofyear, year=lambda df: df.index.year)) + + def localize_datetime(input_df, timezone='Europe/Paris', + datetime_column='tms_gmt'): + """ + Convert datetime column from UTC to another timezone. + """ + tmz = pytz.timezone(timezone) + df = input_df.copy() + return (df.set_index(datetime_column) + .tz_localize(pytz.utc) #  UTC time + .tz_convert(tmz)) # Timezone time +
Add a datetime localization function
## Code Before: import pandas as pd def date_features(input_df, datetime_column='tms_gmt'): """ Given a datetime column, extracts useful date information (minute, hour, dow...) """ df = input_df.copy() return (df.set_index(time_column) .assign(minute=lambda df: df.index.minute, hour=lambda df: df.index.hour, day=lambda df: df.index.day, dow=lambda df: df.index.dayofweek, month=lambda df: df.index.month, week=lambda df: df.index.week, woy=lambda df: df.index.weekofyear, year=lambda df: df.index.year)) ## Instruction: Add a datetime localization function ## Code After: import pandas as pd import pytz def date_features(input_df, datetime_column='tms_gmt'): """ Given a datetime column, extracts useful date information (minute, hour, dow...) """ df = input_df.copy() return (df.set_index(time_column) .assign(minute=lambda df: df.index.minute, hour=lambda df: df.index.hour, day=lambda df: df.index.day, dow=lambda df: df.index.dayofweek, month=lambda df: df.index.month, week=lambda df: df.index.week, woy=lambda df: df.index.weekofyear, year=lambda df: df.index.year)) def localize_datetime(input_df, timezone='Europe/Paris', datetime_column='tms_gmt'): """ Convert datetime column from UTC to another timezone. """ tmz = pytz.timezone(timezone) df = input_df.copy() return (df.set_index(datetime_column) .tz_localize(pytz.utc) #  UTC time .tz_convert(tmz)) # Timezone time
// ... existing code ... import pandas as pd import pytz // ... modified code ... year=lambda df: df.index.year)) def localize_datetime(input_df, timezone='Europe/Paris', datetime_column='tms_gmt'): """ Convert datetime column from UTC to another timezone. """ tmz = pytz.timezone(timezone) df = input_df.copy() return (df.set_index(datetime_column) .tz_localize(pytz.utc) #  UTC time .tz_convert(tmz)) # Timezone time // ... rest of the code ...
013154d359570d591f9315b10c738616d9cddb49
loqusdb/build_models/profile_variant.py
loqusdb/build_models/profile_variant.py
import logging import json from loqusdb.models import ProfileVariant from .variant import get_variant_id LOG = logging.getLogger(__name__) def get_maf(variant): """ if ID CAF exists in INFO column, return the allele frequency for the alt allele. The CAF INFO tag from dbSNP is a Comma delimited list of allele frequencies based on 1000Genomes. Args: variant (cyvcf2.Variant) Returns: maf (float): Minor allele frequency """ if not variant.INFO.get('CAF'): return None maf_list = json.loads(variant.INFO.get('CAF')) return maf_list[1] def build_profile_variant(variant): """Returns a ProfileVariant object Args: variant (cyvcf2.Variant) Returns: variant (models.ProfileVariant) """ chrom = variant.CHROM if chrom.startswith(('chr', 'CHR', 'Chr')): chrom = chrom[3:] pos = int(variant.POS) variant_id = get_variant_id(variant) ref = variant.REF alt = variant.ALT[0] maf = get_maf(variant) profile_variant = ProfileVariant( variant_id=variant_id, chrom=chrom, pos=pos, ref=ref, alt=alt, maf=maf, id_column = variant.ID ) return profile_variant
import logging from loqusdb.models import ProfileVariant from .variant import get_variant_id LOG = logging.getLogger(__name__) def get_maf(variant): """ Gets the MAF (minor allele frequency) tag from the info field for the variant. Args: variant (cyvcf2.Variant) Returns: maf (float): Minor allele frequency """ return variant.INFO.get('MAF') def build_profile_variant(variant): """Returns a ProfileVariant object Args: variant (cyvcf2.Variant) Returns: variant (models.ProfileVariant) """ chrom = variant.CHROM if chrom.startswith(('chr', 'CHR', 'Chr')): chrom = chrom[3:] pos = int(variant.POS) variant_id = get_variant_id(variant) ref = variant.REF alt = variant.ALT[0] maf = get_maf(variant) profile_variant = ProfileVariant( variant_id=variant_id, chrom=chrom, pos=pos, ref=ref, alt=alt, maf=maf, id_column = variant.ID ) return profile_variant
Change from CAF to MAF tag when looking for MAF in vcf file
Change from CAF to MAF tag when looking for MAF in vcf file
Python
mit
moonso/loqusdb
import logging - import json from loqusdb.models import ProfileVariant from .variant import get_variant_id LOG = logging.getLogger(__name__) def get_maf(variant): """ + Gets the MAF (minor allele frequency) tag from the info field for the + variant. - if ID CAF exists in INFO column, return the allele frequency for - the alt allele. The CAF INFO tag from dbSNP is a Comma delimited list of - allele frequencies based on 1000Genomes. Args: variant (cyvcf2.Variant) Returns: - maf (float): Minor allele frequency + maf (float): Minor allele frequency """ - if not variant.INFO.get('CAF'): + return variant.INFO.get('MAF') - return None - maf_list = json.loads(variant.INFO.get('CAF')) - return maf_list[1] def build_profile_variant(variant): """Returns a ProfileVariant object Args: variant (cyvcf2.Variant) Returns: variant (models.ProfileVariant) """ chrom = variant.CHROM if chrom.startswith(('chr', 'CHR', 'Chr')): chrom = chrom[3:] pos = int(variant.POS) variant_id = get_variant_id(variant) ref = variant.REF alt = variant.ALT[0] maf = get_maf(variant) profile_variant = ProfileVariant( variant_id=variant_id, chrom=chrom, pos=pos, ref=ref, alt=alt, maf=maf, id_column = variant.ID ) return profile_variant
Change from CAF to MAF tag when looking for MAF in vcf file
## Code Before: import logging import json from loqusdb.models import ProfileVariant from .variant import get_variant_id LOG = logging.getLogger(__name__) def get_maf(variant): """ if ID CAF exists in INFO column, return the allele frequency for the alt allele. The CAF INFO tag from dbSNP is a Comma delimited list of allele frequencies based on 1000Genomes. Args: variant (cyvcf2.Variant) Returns: maf (float): Minor allele frequency """ if not variant.INFO.get('CAF'): return None maf_list = json.loads(variant.INFO.get('CAF')) return maf_list[1] def build_profile_variant(variant): """Returns a ProfileVariant object Args: variant (cyvcf2.Variant) Returns: variant (models.ProfileVariant) """ chrom = variant.CHROM if chrom.startswith(('chr', 'CHR', 'Chr')): chrom = chrom[3:] pos = int(variant.POS) variant_id = get_variant_id(variant) ref = variant.REF alt = variant.ALT[0] maf = get_maf(variant) profile_variant = ProfileVariant( variant_id=variant_id, chrom=chrom, pos=pos, ref=ref, alt=alt, maf=maf, id_column = variant.ID ) return profile_variant ## Instruction: Change from CAF to MAF tag when looking for MAF in vcf file ## Code After: import logging from loqusdb.models import ProfileVariant from .variant import get_variant_id LOG = logging.getLogger(__name__) def get_maf(variant): """ Gets the MAF (minor allele frequency) tag from the info field for the variant. Args: variant (cyvcf2.Variant) Returns: maf (float): Minor allele frequency """ return variant.INFO.get('MAF') def build_profile_variant(variant): """Returns a ProfileVariant object Args: variant (cyvcf2.Variant) Returns: variant (models.ProfileVariant) """ chrom = variant.CHROM if chrom.startswith(('chr', 'CHR', 'Chr')): chrom = chrom[3:] pos = int(variant.POS) variant_id = get_variant_id(variant) ref = variant.REF alt = variant.ALT[0] maf = get_maf(variant) profile_variant = ProfileVariant( variant_id=variant_id, chrom=chrom, pos=pos, ref=ref, alt=alt, maf=maf, id_column = variant.ID ) return profile_variant
// ... existing code ... import logging // ... modified code ... """ Gets the MAF (minor allele frequency) tag from the info field for the variant. ... Returns: maf (float): Minor allele frequency ... return variant.INFO.get('MAF') // ... rest of the code ...
5f1fa23dd8e0850a9f0e6a054ec6738e5a174ff7
database/tables.py
database/tables.py
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False))
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) Table("moderator", METADATA, Column("stream", String, primary_key=True), Column("name", String, primary_key=True))
Add a table for caching moderators
Add a table for caching moderators
Python
mit
pyrige/pump19
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) + Table("moderator", METADATA, + Column("stream", String, primary_key=True), + Column("name", String, primary_key=True)) +
Add a table for caching moderators
## Code Before: from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) ## Instruction: Add a table for caching moderators ## Code After: from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) Table("moderator", METADATA, Column("stream", String, primary_key=True), Column("name", String, primary_key=True))
... Column("text", String, nullable=False)) Table("moderator", METADATA, Column("stream", String, primary_key=True), Column("name", String, primary_key=True)) ...
526b1028925a59957e805b29fc624dae318661ef
finances/models.py
finances/models.py
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save()
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() def __repr__(self): return '<User %r>' % self.username
Add __repr__ for User model
Add __repr__ for User model
Python
mit
Afonasev/YourFinances
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() + def __repr__(self): + return '<User %r>' % self.username +
Add __repr__ for User model
## Code Before: import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() ## Instruction: Add __repr__ for User model ## Code After: import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that 
name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() def __repr__(self): return '<User %r>' % self.username
... user.save() def __repr__(self): return '<User %r>' % self.username ...
d2b4ec50442a00df85ef525cc82aca971b72eb86
erpnext/patches/v11_0/rename_field_max_days_allowed.py
erpnext/patches/v11_0/rename_field_max_days_allowed.py
import frappe from frappe.model.utils.rename_field import rename_field def execute(): frappe.reload_doc("hr", "doctype", "leave_type") frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed")
import frappe def execute(): frappe.db.sql(""" UPDATE `tabLeave Type` SET max_days_allowed = '0' WHERE trim(coalesce(max_days_allowed, '')) = '' """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
Set null values to '0' before changing column type
[fix] Set null values to '0' before changing column type
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
import frappe - from frappe.model.utils.rename_field import rename_field def execute(): - frappe.reload_doc("hr", "doctype", "leave_type") + frappe.db.sql(""" + UPDATE `tabLeave Type` + SET max_days_allowed = '0' + WHERE trim(coalesce(max_days_allowed, '')) = '' + """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") - rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed") +
Set null values to '0' before changing column type
## Code Before: import frappe from frappe.model.utils.rename_field import rename_field def execute(): frappe.reload_doc("hr", "doctype", "leave_type") frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed") ## Instruction: Set null values to '0' before changing column type ## Code After: import frappe def execute(): frappe.db.sql(""" UPDATE `tabLeave Type` SET max_days_allowed = '0' WHERE trim(coalesce(max_days_allowed, '')) = '' """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
# ... existing code ... import frappe # ... modified code ... def execute(): frappe.db.sql(""" UPDATE `tabLeave Type` SET max_days_allowed = '0' WHERE trim(coalesce(max_days_allowed, '')) = '' """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") # ... rest of the code ...
6be3a40010b7256cb5b8fadfe4ef40b6c5691a06
jungle/session.py
jungle/session.py
import boto3 def create_session(profile_name): if not profile_name: return boto3 else: return boto3.Session(profile_name=profile_name)
import sys import boto3 import botocore import click def create_session(profile_name): if profile_name is None: return boto3 else: try: session = boto3.Session(profile_name=profile_name) return session except botocore.exceptions.ProfileNotFound as e: click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) sys.exit(2)
Add error message when wrong AWS Profile Name is given
Add error message when wrong AWS Profile Name is given
Python
mit
achiku/jungle
+ import sys + import boto3 + import botocore + import click def create_session(profile_name): - if not profile_name: + if profile_name is None: return boto3 else: + try: - return boto3.Session(profile_name=profile_name) + session = boto3.Session(profile_name=profile_name) + return session + except botocore.exceptions.ProfileNotFound as e: + click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) + sys.exit(2)
Add error message when wrong AWS Profile Name is given
## Code Before: import boto3 def create_session(profile_name): if not profile_name: return boto3 else: return boto3.Session(profile_name=profile_name) ## Instruction: Add error message when wrong AWS Profile Name is given ## Code After: import sys import boto3 import botocore import click def create_session(profile_name): if profile_name is None: return boto3 else: try: session = boto3.Session(profile_name=profile_name) return session except botocore.exceptions.ProfileNotFound as e: click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) sys.exit(2)
... import sys import boto3 import botocore import click ... def create_session(profile_name): if profile_name is None: return boto3 ... else: try: session = boto3.Session(profile_name=profile_name) return session except botocore.exceptions.ProfileNotFound as e: click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) sys.exit(2) ...
dbe7bfdba6392cb2cc5c8d0e710682c2cb9c2bc5
cellom2tif/filetypes.py
cellom2tif/filetypes.py
def is_cellomics_image(fn): """Determine whether a file is a Cellomics image. Parameters ---------- fn : string The filename of the file in question. Returns ------- is_cellom : bool True if the filename points to a Cellomics image. """ is_cellom = fn.endswith('.C01') or fn.endswith('.c01') return is_cellom def is_cellomics_mask(fn): """Determine whether a file is a Cellomics mask image. Parameters ---------- fn : string The filename. Returns ------- is_mask : bool True if the filename points to a Cellomics mask image. """ is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01') return is_mask
import os def fn_has_ext(fn, ext, case_sensitive=False): """ Determine whether a file has a particular extension. Parameters ---------- fn : string The filename of the query file. ext : string The extension being checked. case_sensitive : bool Whether or not to treat the extension as case sensitive. Returns ------- file_has_ext : bool True if the filename has the specified extension. """ fn_ext = os.path.splitext(fn)[1][1:] if case_sensitive: file_has_ext = fn_ext == ext else: file_has_ext = fn_ext.lower() == ext.lower() return file_has_ext def is_cellomics_image(fn): """Determine whether a file is a Cellomics image. Parameters ---------- fn : string The filename of the file in question. Returns ------- is_cellom : bool True if the filename points to a Cellomics image. """ is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB') return is_cellom def is_cellomics_mask(fn): """Determine whether a file is a Cellomics mask image. Parameters ---------- fn : string The filename. Returns ------- is_mask : bool True if the filename points to a Cellomics mask image. """ is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01') return is_mask
Add DIB files to cellomics file filter
Add DIB files to cellomics file filter
Python
bsd-3-clause
jni/cellom2tif
+ import os + + + def fn_has_ext(fn, ext, case_sensitive=False): + """ + Determine whether a file has a particular extension. + + Parameters + ---------- + fn : string + The filename of the query file. + ext : string + The extension being checked. + case_sensitive : bool + Whether or not to treat the extension as case sensitive. + + Returns + ------- + file_has_ext : bool + True if the filename has the specified extension. + """ + fn_ext = os.path.splitext(fn)[1][1:] + if case_sensitive: + file_has_ext = fn_ext == ext + else: + file_has_ext = fn_ext.lower() == ext.lower() + return file_has_ext + + def is_cellomics_image(fn): """Determine whether a file is a Cellomics image. Parameters ---------- fn : string The filename of the file in question. Returns ------- is_cellom : bool True if the filename points to a Cellomics image. """ - is_cellom = fn.endswith('.C01') or fn.endswith('.c01') + is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB') return is_cellom def is_cellomics_mask(fn): """Determine whether a file is a Cellomics mask image. Parameters ---------- fn : string The filename. Returns ------- is_mask : bool True if the filename points to a Cellomics mask image. """ is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01') return is_mask
Add DIB files to cellomics file filter
## Code Before: def is_cellomics_image(fn): """Determine whether a file is a Cellomics image. Parameters ---------- fn : string The filename of the file in question. Returns ------- is_cellom : bool True if the filename points to a Cellomics image. """ is_cellom = fn.endswith('.C01') or fn.endswith('.c01') return is_cellom def is_cellomics_mask(fn): """Determine whether a file is a Cellomics mask image. Parameters ---------- fn : string The filename. Returns ------- is_mask : bool True if the filename points to a Cellomics mask image. """ is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01') return is_mask ## Instruction: Add DIB files to cellomics file filter ## Code After: import os def fn_has_ext(fn, ext, case_sensitive=False): """ Determine whether a file has a particular extension. Parameters ---------- fn : string The filename of the query file. ext : string The extension being checked. case_sensitive : bool Whether or not to treat the extension as case sensitive. Returns ------- file_has_ext : bool True if the filename has the specified extension. """ fn_ext = os.path.splitext(fn)[1][1:] if case_sensitive: file_has_ext = fn_ext == ext else: file_has_ext = fn_ext.lower() == ext.lower() return file_has_ext def is_cellomics_image(fn): """Determine whether a file is a Cellomics image. Parameters ---------- fn : string The filename of the file in question. Returns ------- is_cellom : bool True if the filename points to a Cellomics image. """ is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB') return is_cellom def is_cellomics_mask(fn): """Determine whether a file is a Cellomics mask image. Parameters ---------- fn : string The filename. Returns ------- is_mask : bool True if the filename points to a Cellomics mask image. """ is_mask = fn.endswith('o1.C01') or fn.endswith('o1.c01') return is_mask
... import os def fn_has_ext(fn, ext, case_sensitive=False): """ Determine whether a file has a particular extension. Parameters ---------- fn : string The filename of the query file. ext : string The extension being checked. case_sensitive : bool Whether or not to treat the extension as case sensitive. Returns ------- file_has_ext : bool True if the filename has the specified extension. """ fn_ext = os.path.splitext(fn)[1][1:] if case_sensitive: file_has_ext = fn_ext == ext else: file_has_ext = fn_ext.lower() == ext.lower() return file_has_ext def is_cellomics_image(fn): ... """ is_cellom = fn_has_ext(fn, 'C01') or fn_has_ext(fn, 'DIB') return is_cellom ...
1ba3536e214e283f503db0a9bf0d1ac4aa64f771
tcconfig/_tc_command_helper.py
tcconfig/_tc_command_helper.py
from __future__ import absolute_import, unicode_literals import errno import sys import subprocrunner as spr from ._common import find_bin_path from ._const import Tc, TcSubCommand from ._error import NetworkInterfaceNotFoundError from ._logger import logger def check_tc_command_installation(): try: spr.Which("tc").verify() except spr.CommandNotFoundError as e: logger.error("{:s}: {}".format(e.__class__.__name__, e)) sys.exit(errno.ENOENT) def get_tc_base_command(tc_subcommand): if tc_subcommand not in TcSubCommand: raise ValueError("the argument must be a TcSubCommand value") return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value) def run_tc_show(subcommand, device): from ._network import verify_network_interface verify_network_interface(device) runner = spr.SubprocessRunner( "{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)) if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1: # reach here if the device does not exist at the system and netiface # not installed. raise NetworkInterfaceNotFoundError(device=device) return runner.stdout
from __future__ import absolute_import, unicode_literals import errno import sys import subprocrunner as spr from ._common import find_bin_path from ._const import Tc, TcSubCommand from ._error import NetworkInterfaceNotFoundError from ._logger import logger def check_tc_command_installation(): if find_bin_path("tc"): return logger.error("command not found: tc") sys.exit(errno.ENOENT) def get_tc_base_command(tc_subcommand): if tc_subcommand not in TcSubCommand: raise ValueError("the argument must be a TcSubCommand value") return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value) def run_tc_show(subcommand, device): from ._network import verify_network_interface verify_network_interface(device) runner = spr.SubprocessRunner( "{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)) if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1: # reach here if the device does not exist at the system and netiface # not installed. raise NetworkInterfaceNotFoundError(device=device) return runner.stdout
Change command installation check process
Change command installation check process To properly check even if the user is not root.
Python
mit
thombashi/tcconfig,thombashi/tcconfig
from __future__ import absolute_import, unicode_literals import errno import sys import subprocrunner as spr from ._common import find_bin_path from ._const import Tc, TcSubCommand from ._error import NetworkInterfaceNotFoundError from ._logger import logger def check_tc_command_installation(): - try: - spr.Which("tc").verify() - except spr.CommandNotFoundError as e: - logger.error("{:s}: {}".format(e.__class__.__name__, e)) + if find_bin_path("tc"): + return + + logger.error("command not found: tc") - sys.exit(errno.ENOENT) + sys.exit(errno.ENOENT) def get_tc_base_command(tc_subcommand): if tc_subcommand not in TcSubCommand: raise ValueError("the argument must be a TcSubCommand value") return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value) def run_tc_show(subcommand, device): from ._network import verify_network_interface verify_network_interface(device) runner = spr.SubprocessRunner( "{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)) if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1: # reach here if the device does not exist at the system and netiface # not installed. raise NetworkInterfaceNotFoundError(device=device) return runner.stdout
Change command installation check process
## Code Before: from __future__ import absolute_import, unicode_literals import errno import sys import subprocrunner as spr from ._common import find_bin_path from ._const import Tc, TcSubCommand from ._error import NetworkInterfaceNotFoundError from ._logger import logger def check_tc_command_installation(): try: spr.Which("tc").verify() except spr.CommandNotFoundError as e: logger.error("{:s}: {}".format(e.__class__.__name__, e)) sys.exit(errno.ENOENT) def get_tc_base_command(tc_subcommand): if tc_subcommand not in TcSubCommand: raise ValueError("the argument must be a TcSubCommand value") return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value) def run_tc_show(subcommand, device): from ._network import verify_network_interface verify_network_interface(device) runner = spr.SubprocessRunner( "{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)) if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1: # reach here if the device does not exist at the system and netiface # not installed. 
raise NetworkInterfaceNotFoundError(device=device) return runner.stdout ## Instruction: Change command installation check process ## Code After: from __future__ import absolute_import, unicode_literals import errno import sys import subprocrunner as spr from ._common import find_bin_path from ._const import Tc, TcSubCommand from ._error import NetworkInterfaceNotFoundError from ._logger import logger def check_tc_command_installation(): if find_bin_path("tc"): return logger.error("command not found: tc") sys.exit(errno.ENOENT) def get_tc_base_command(tc_subcommand): if tc_subcommand not in TcSubCommand: raise ValueError("the argument must be a TcSubCommand value") return "{:s} {:s}".format(find_bin_path("tc"), tc_subcommand.value) def run_tc_show(subcommand, device): from ._network import verify_network_interface verify_network_interface(device) runner = spr.SubprocessRunner( "{:s} show dev {:s}".format(get_tc_base_command(subcommand), device)) if runner.run() != 0 and runner.stderr.find("Cannot find device") != -1: # reach here if the device does not exist at the system and netiface # not installed. raise NetworkInterfaceNotFoundError(device=device) return runner.stdout
... def check_tc_command_installation(): if find_bin_path("tc"): return logger.error("command not found: tc") sys.exit(errno.ENOENT) ...
00ef4db967b00c5cef5c72d5266327bbd9db5909
ibmcnx/test/loadFunction.py
ibmcnx/test/loadFunction.py
import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() def loadFilesService(): global globdict exec open("filesAdmin.py").read()
import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() locdict = locals() def loadFilesService(): global globdict global locdict execfile("filesAdmin.py",globdict,locdict)
Customize scripts to work with menu
Customize scripts to work with menu
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() + locdict = locals() def loadFilesService(): global globdict - exec open("filesAdmin.py").read() + global locdict + execfile("filesAdmin.py",globdict,locdict)
Customize scripts to work with menu
## Code Before: import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() def loadFilesService(): global globdict exec open("filesAdmin.py").read() ## Instruction: Customize scripts to work with menu ## Code After: import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() locdict = locals() def loadFilesService(): global globdict global locdict execfile("filesAdmin.py",globdict,locdict)
... globdict = globals() locdict = locals() ... global globdict global locdict execfile("filesAdmin.py",globdict,locdict) ...
2a57e5c17115e9c89936e6667985af1a47bf3247
raiden/utils/typing.py
raiden/utils/typing.py
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import from typing import NewType T_Address = bytes Address = NewType('Address', bytes) T_BlockExpiration = int BlockExpiration = NewType('BlockExpiration', int) T_BlockNumber = int BlockNumber = NewType('BlockNumber', int) T_BlockTimeout = int BlockTimeout = NewType('BlockNumber', int) T_ChannelID = T_Address ChannelID = NewType('ChannelID', Address) T_Keccak256 = bytes Keccak256 = NewType('Keccak256', bytes) T_Secret = bytes Secret = NewType('Secret', bytes) T_Signature = bytes Signature = NewType('Signature', bytes) T_TokenAmount = int TokenAmount = NewType('TokenAmount', int)
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import from typing import NewType T_Address = bytes Address = NewType('Address', bytes) T_BlockExpiration = int BlockExpiration = NewType('BlockExpiration', int) T_BlockNumber = int BlockNumber = NewType('BlockNumber', int) T_BlockTimeout = int BlockTimeout = NewType('BlockNumber', int) T_ChannelID = bytes ChannelID = NewType('ChannelID', bytes) T_Keccak256 = bytes Keccak256 = NewType('Keccak256', bytes) T_Secret = bytes Secret = NewType('Secret', bytes) T_Signature = bytes Signature = NewType('Signature', bytes) T_TokenAmount = int TokenAmount = NewType('TokenAmount', int)
Fix an oversight in new type definitions
Fix an oversight in new type definitions
Python
mit
hackaugusto/raiden,hackaugusto/raiden
from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import from typing import NewType T_Address = bytes Address = NewType('Address', bytes) T_BlockExpiration = int BlockExpiration = NewType('BlockExpiration', int) T_BlockNumber = int BlockNumber = NewType('BlockNumber', int) T_BlockTimeout = int BlockTimeout = NewType('BlockNumber', int) - T_ChannelID = T_Address + T_ChannelID = bytes - ChannelID = NewType('ChannelID', Address) + ChannelID = NewType('ChannelID', bytes) T_Keccak256 = bytes Keccak256 = NewType('Keccak256', bytes) T_Secret = bytes Secret = NewType('Secret', bytes) T_Signature = bytes Signature = NewType('Signature', bytes) T_TokenAmount = int TokenAmount = NewType('TokenAmount', int)
Fix an oversight in new type definitions
## Code Before: from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import from typing import NewType T_Address = bytes Address = NewType('Address', bytes) T_BlockExpiration = int BlockExpiration = NewType('BlockExpiration', int) T_BlockNumber = int BlockNumber = NewType('BlockNumber', int) T_BlockTimeout = int BlockTimeout = NewType('BlockNumber', int) T_ChannelID = T_Address ChannelID = NewType('ChannelID', Address) T_Keccak256 = bytes Keccak256 = NewType('Keccak256', bytes) T_Secret = bytes Secret = NewType('Secret', bytes) T_Signature = bytes Signature = NewType('Signature', bytes) T_TokenAmount = int TokenAmount = NewType('TokenAmount', int) ## Instruction: Fix an oversight in new type definitions ## Code After: from typing import * # NOQA pylint:disable=wildcard-import,unused-wildcard-import from typing import NewType T_Address = bytes Address = NewType('Address', bytes) T_BlockExpiration = int BlockExpiration = NewType('BlockExpiration', int) T_BlockNumber = int BlockNumber = NewType('BlockNumber', int) T_BlockTimeout = int BlockTimeout = NewType('BlockNumber', int) T_ChannelID = bytes ChannelID = NewType('ChannelID', bytes) T_Keccak256 = bytes Keccak256 = NewType('Keccak256', bytes) T_Secret = bytes Secret = NewType('Secret', bytes) T_Signature = bytes Signature = NewType('Signature', bytes) T_TokenAmount = int TokenAmount = NewType('TokenAmount', int)
# ... existing code ... T_ChannelID = bytes ChannelID = NewType('ChannelID', bytes) # ... rest of the code ...
e5f662d9cebe4133705eca74a300c325d432ad04
anvil/components/cinder_client.py
anvil/components/cinder_client.py
from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) def _filter_pip_requires_line(self, line): if line.lower().find('keystoneclient') != -1: return None if line.lower().find('novaclient') != -1: return None if line.lower().find('glanceclient') != -1: return None return line class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
Remove destruction of pips/test requires entries that don't exist.
Remove destruction of pips/test requires entries that don't exist.
Python
apache-2.0
stackforge/anvil,stackforge/anvil,mc2014/anvil,mc2014/anvil
from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) - def _filter_pip_requires_line(self, line): - if line.lower().find('keystoneclient') != -1: - return None - if line.lower().find('novaclient') != -1: - return None - if line.lower().find('glanceclient') != -1: - return None - return line - class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
Remove destruction of pips/test requires entries that don't exist.
## Code Before: from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) def _filter_pip_requires_line(self, line): if line.lower().find('keystoneclient') != -1: return None if line.lower().find('novaclient') != -1: return None if line.lower().find('glanceclient') != -1: return None return line class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs) ## Instruction: Remove destruction of pips/test requires entries that don't exist. ## Code After: from anvil import components as comp class CinderClientUninstaller(comp.PythonUninstallComponent): def __init__(self, *args, **kargs): comp.PythonUninstallComponent.__init__(self, *args, **kargs) class CinderClientInstaller(comp.PythonInstallComponent): def __init__(self, *args, **kargs): comp.PythonInstallComponent.__init__(self, *args, **kargs) class CinderClientRuntime(comp.EmptyRuntime): def __init__(self, *args, **kargs): comp.EmptyRuntime.__init__(self, *args, **kargs)
... ...
69e6db7a4a28ff1f50bd4f12f550a2b65f05eb38
utils/dusk/__init__.py
utils/dusk/__init__.py
from .context import Context # NOQA from .command import * # NOQA from .command_holder import CommandHolder # NOQA from .constants import * # NOQA
from .context import Context # NOQA from .command import * # NOQA from .command_holder import CommandHolder # NOQA from .constants import * # NOQA __version__ = "1.0.0"
Remove obsolete TODO and add version
Remove obsolete TODO and add version
Python
mit
awau/Amethyst,HexadecimalPython/Xeili
from .context import Context # NOQA from .command import * # NOQA from .command_holder import CommandHolder # NOQA from .constants import * # NOQA + __version__ = "1.0.0"
Remove obsolete TODO and add version
## Code Before: from .context import Context # NOQA from .command import * # NOQA from .command_holder import CommandHolder # NOQA from .constants import * # NOQA ## Instruction: Remove obsolete TODO and add version ## Code After: from .context import Context # NOQA from .command import * # NOQA from .command_holder import CommandHolder # NOQA from .constants import * # NOQA __version__ = "1.0.0"
// ... existing code ... from .constants import * # NOQA __version__ = "1.0.0" // ... rest of the code ...
fd4dc4bdd32283b67577630c38624d3df705efd3
mathphys/functions.py
mathphys/functions.py
"""Useful functions.""" import numpy as _np def polyfit(x, y, monomials, algorithm='lstsq'): """Implement Custom polyfit.""" X = _np.zeros((len(x), len(monomials))) N = _np.zeros((len(x), len(monomials))) for i in range(X.shape[1]): X[:, i] = x N[:, i] = monomials[i] XN = X ** N y_ = _np.zeros((len(y), 1)) y_[:, 0] = y XNt = _np.transpose(XN) b = _np.dot(XNt, y_) X = _np.dot(XNt, XN) if algorithm is 'lstsq': r = _np.linalg.lstsq(X, b) coeffs = r[0][:, 0] else: r = _np.linalg.solve(X, b) coeffs = r[:, 0] # finds maximum diff and its base value y_fitted = _np.dot(XN, coeffs) y_diff = abs(y_fitted - y_[:, 0]) max_error = max(y_diff) idx = [i for i, value in enumerate(y_diff) if value == max_error] base_value = y_[idx[0], 0] return (coeffs, (max_error, base_value))
"""Useful functions.""" import numpy as _np def polyfit(x, y, monomials): """Implement Custom polyfit.""" coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) # finds maximum diff and its base value y_fitted = _np.polynomial.polynomial.polyval(x, coef) y_diff = abs(y_fitted - y) idx = _np.argmax(y_diff) coeffs = coef[monomials] return (coeffs, (y_diff[idx], y[idx]))
Change implementation of polyfit method.
API: Change implementation of polyfit method. Use new numpy.polynomial.polynomial.polyfit instead of implementing least squares by hand. This method is supposed to be more robust to numerical errors. With this change, the keyword argument algorithm was removed.
Python
mit
lnls-fac/mathphys
"""Useful functions.""" import numpy as _np - def polyfit(x, y, monomials, algorithm='lstsq'): + def polyfit(x, y, monomials): """Implement Custom polyfit.""" + coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) - X = _np.zeros((len(x), len(monomials))) - N = _np.zeros((len(x), len(monomials))) - for i in range(X.shape[1]): - X[:, i] = x - N[:, i] = monomials[i] - XN = X ** N - y_ = _np.zeros((len(y), 1)) - y_[:, 0] = y - XNt = _np.transpose(XN) - b = _np.dot(XNt, y_) - X = _np.dot(XNt, XN) - - if algorithm is 'lstsq': - r = _np.linalg.lstsq(X, b) - coeffs = r[0][:, 0] - else: - r = _np.linalg.solve(X, b) - coeffs = r[:, 0] # finds maximum diff and its base value - y_fitted = _np.dot(XN, coeffs) + y_fitted = _np.polynomial.polynomial.polyval(x, coef) - y_diff = abs(y_fitted - y_[:, 0]) + y_diff = abs(y_fitted - y) + idx = _np.argmax(y_diff) - max_error = max(y_diff) - idx = [i for i, value in enumerate(y_diff) if value == max_error] - base_value = y_[idx[0], 0] - return (coeffs, (max_error, base_value)) + coeffs = coef[monomials] + return (coeffs, (y_diff[idx], y[idx]))
Change implementation of polyfit method.
## Code Before: """Useful functions.""" import numpy as _np def polyfit(x, y, monomials, algorithm='lstsq'): """Implement Custom polyfit.""" X = _np.zeros((len(x), len(monomials))) N = _np.zeros((len(x), len(monomials))) for i in range(X.shape[1]): X[:, i] = x N[:, i] = monomials[i] XN = X ** N y_ = _np.zeros((len(y), 1)) y_[:, 0] = y XNt = _np.transpose(XN) b = _np.dot(XNt, y_) X = _np.dot(XNt, XN) if algorithm is 'lstsq': r = _np.linalg.lstsq(X, b) coeffs = r[0][:, 0] else: r = _np.linalg.solve(X, b) coeffs = r[:, 0] # finds maximum diff and its base value y_fitted = _np.dot(XN, coeffs) y_diff = abs(y_fitted - y_[:, 0]) max_error = max(y_diff) idx = [i for i, value in enumerate(y_diff) if value == max_error] base_value = y_[idx[0], 0] return (coeffs, (max_error, base_value)) ## Instruction: Change implementaton of polyfit method. ## Code After: """Useful functions.""" import numpy as _np def polyfit(x, y, monomials): """Implement Custom polyfit.""" coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) # finds maximum diff and its base value y_fitted = _np.polynomial.polynomial.polyval(x, coef) y_diff = abs(y_fitted - y) idx = _np.argmax(y_diff) coeffs = coef[monomials] return (coeffs, (y_diff[idx], y[idx]))
... def polyfit(x, y, monomials): """Implement Custom polyfit.""" coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) ... # finds maximum diff and its base value y_fitted = _np.polynomial.polynomial.polyval(x, coef) y_diff = abs(y_fitted - y) idx = _np.argmax(y_diff) coeffs = coef[monomials] return (coeffs, (y_diff[idx], y[idx])) ...
8a44705413d3a01e897d4a922e7c1383b60a2927
plugins/VersionUpgrade/VersionUpgrade21to22/__init__.py
plugins/VersionUpgrade/VersionUpgrade21to22/__init__.py
from . import VersionUpgrade21to22 from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."), "api": 2 }, "version_upgrade": { "profile": { "from": 1, "to": 2 }, "machine_instance": { "from": 1, "to": 2 } } } def register(app): return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
from . import VersionUpgrade21to22 from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."), "api": 2 }, "version_upgrade": { # From To Upgrade function ("profile", 1): ("instance_container", 2, VersionUpgrade21to22.upgradeProfile), ("machine_instance", 1): ("container_stack", 2, VersionUpgrade21to22.upgradeMachineInstance), ("preferences", 1): ("preferences", 2, VersionUpgrade21to22.upgradePreferences) }, "sources": { "profile": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./profiles"} }, "machine_instance": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./machine_instances"} }, "preferences": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"."} } } } def register(app): return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
Update metadata with dynamic config types
Update metadata with dynamic config types After settings rework, we decided to make the upgrade plug-ins define their own configuration types. This is basically the definition for these configuration types. Only the get_version function is not yet implemented. Contributes to issue CURA-844.
Python
agpl-3.0
Curahelper/Cura,ynotstartups/Wanhao,hmflash/Cura,totalretribution/Cura,hmflash/Cura,Curahelper/Cura,senttech/Cura,fieldOfView/Cura,senttech/Cura,fieldOfView/Cura,totalretribution/Cura,ynotstartups/Wanhao
from . import VersionUpgrade21to22 from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."), "api": 2 }, "version_upgrade": { + # From To Upgrade function + ("profile", 1): ("instance_container", 2, VersionUpgrade21to22.upgradeProfile), + ("machine_instance", 1): ("container_stack", 2, VersionUpgrade21to22.upgradeMachineInstance), + ("preferences", 1): ("preferences", 2, VersionUpgrade21to22.upgradePreferences) + }, + "sources": { "profile": { - "from": 1, - "to": 2 + "get_version": VersionUpgrade21to22.getCfgVersion, + "location": {"./profiles"} }, "machine_instance": { - "from": 1, + "get_version": VersionUpgrade21to22.getCfgVersion, + "location": {"./machine_instances"} + }, + "preferences": { + "get_version": VersionUpgrade21to22.getCfgVersion, - "to": 2 + "location": {"."} } } } def register(app): return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
Update metadata with dynamic config types
## Code Before: from . import VersionUpgrade21to22 from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."), "api": 2 }, "version_upgrade": { "profile": { "from": 1, "to": 2 }, "machine_instance": { "from": 1, "to": 2 } } } def register(app): return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() } ## Instruction: Update metadata with dynamic config types ## Code After: from . import VersionUpgrade21to22 from UM.i18n import i18nCatalog catalog = i18nCatalog("cura") def getMetaData(): return { "plugin": { "name": catalog.i18nc("@label", "Version Upgrade 2.1 to 2.2"), "author": "Ultimaker", "version": "1.0", "description": catalog.i18nc("@info:whatsthis", "Upgrades configurations from Cura 2.1 to Cura 2.2."), "api": 2 }, "version_upgrade": { # From To Upgrade function ("profile", 1): ("instance_container", 2, VersionUpgrade21to22.upgradeProfile), ("machine_instance", 1): ("container_stack", 2, VersionUpgrade21to22.upgradeMachineInstance), ("preferences", 1): ("preferences", 2, VersionUpgrade21to22.upgradePreferences) }, "sources": { "profile": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./profiles"} }, "machine_instance": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./machine_instances"} }, "preferences": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"."} } } } def register(app): return { "version_upgrade": VersionUpgrade21to22.VersionUpgrade21to22() }
# ... existing code ... "version_upgrade": { # From To Upgrade function ("profile", 1): ("instance_container", 2, VersionUpgrade21to22.upgradeProfile), ("machine_instance", 1): ("container_stack", 2, VersionUpgrade21to22.upgradeMachineInstance), ("preferences", 1): ("preferences", 2, VersionUpgrade21to22.upgradePreferences) }, "sources": { "profile": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./profiles"} }, # ... modified code ... "machine_instance": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"./machine_instances"} }, "preferences": { "get_version": VersionUpgrade21to22.getCfgVersion, "location": {"."} } # ... rest of the code ...
77e9d92e040b60cc5e894a59ecfde0a91a8f1f8c
coop_cms/apps/email_auth/forms.py
coop_cms/apps/email_auth/forms.py
from django import forms from django.contrib.auth import authenticate from django.utils.translation import ugettext as _ class EmailAuthForm(forms.Form): email = forms.EmailField(required=True, label=_(u"Email")) password = forms.CharField(label=_("Password"), widget=forms.PasswordInput) def __init__(self, request=None, *args, **kwargs): super(EmailAuthForm, self).__init__(*args, **kwargs) def _authenticate(self): email = self.cleaned_data.get('email') password = self.cleaned_data.get('password') error_messages = { 'invalid_login': _("Please enter a correct %(email)s and password. " "Note that both fields may be case-sensitive."), } if email and password: self.user_cache = authenticate(email=email, password=password) if self.user_cache is None: raise forms.ValidationError( error_messages['invalid_login'], code='invalid_login', params={'email': _(u"email")}, ) def get_user(self): return self.user_cache def clean(self): self._authenticate() return self.cleaned_data
from django import forms from django.contrib.auth import authenticate from django.utils.translation import ugettext as _, ugettext_lazy as __ class EmailAuthForm(forms.Form): email = forms.EmailField(required=True, label=__(u"Email")) password = forms.CharField(label=__("Password"), widget=forms.PasswordInput) def __init__(self, request=None, *args, **kwargs): super(EmailAuthForm, self).__init__(*args, **kwargs) def _authenticate(self): email = self.cleaned_data.get('email') password = self.cleaned_data.get('password') error_messages = { 'invalid_login': _("Please enter a correct %(email)s and password. " "Note that both fields may be case-sensitive."), } if email and password: self.user_cache = authenticate(email=email, password=password) if self.user_cache is None: raise forms.ValidationError( error_messages['invalid_login'], code='invalid_login', params={'email': _(u"email")}, ) def get_user(self): return self.user_cache def clean(self): self._authenticate() return self.cleaned_data
Fix translation issue on EmailAuthForm
Fix translation issue on EmailAuthForm
Python
bsd-3-clause
ljean/coop_cms,ljean/coop_cms,ljean/coop_cms
from django import forms from django.contrib.auth import authenticate - from django.utils.translation import ugettext as _ + from django.utils.translation import ugettext as _, ugettext_lazy as __ class EmailAuthForm(forms.Form): - email = forms.EmailField(required=True, label=_(u"Email")) + email = forms.EmailField(required=True, label=__(u"Email")) - password = forms.CharField(label=_("Password"), widget=forms.PasswordInput) + password = forms.CharField(label=__("Password"), widget=forms.PasswordInput) def __init__(self, request=None, *args, **kwargs): super(EmailAuthForm, self).__init__(*args, **kwargs) def _authenticate(self): email = self.cleaned_data.get('email') password = self.cleaned_data.get('password') error_messages = { 'invalid_login': _("Please enter a correct %(email)s and password. " "Note that both fields may be case-sensitive."), } if email and password: self.user_cache = authenticate(email=email, password=password) if self.user_cache is None: raise forms.ValidationError( error_messages['invalid_login'], code='invalid_login', params={'email': _(u"email")}, ) def get_user(self): return self.user_cache def clean(self): self._authenticate() return self.cleaned_data
Fix translation issue on EmailAuthForm
## Code Before: from django import forms from django.contrib.auth import authenticate from django.utils.translation import ugettext as _ class EmailAuthForm(forms.Form): email = forms.EmailField(required=True, label=_(u"Email")) password = forms.CharField(label=_("Password"), widget=forms.PasswordInput) def __init__(self, request=None, *args, **kwargs): super(EmailAuthForm, self).__init__(*args, **kwargs) def _authenticate(self): email = self.cleaned_data.get('email') password = self.cleaned_data.get('password') error_messages = { 'invalid_login': _("Please enter a correct %(email)s and password. " "Note that both fields may be case-sensitive."), } if email and password: self.user_cache = authenticate(email=email, password=password) if self.user_cache is None: raise forms.ValidationError( error_messages['invalid_login'], code='invalid_login', params={'email': _(u"email")}, ) def get_user(self): return self.user_cache def clean(self): self._authenticate() return self.cleaned_data ## Instruction: Fix translation issue on EmailAuthForm ## Code After: from django import forms from django.contrib.auth import authenticate from django.utils.translation import ugettext as _, ugettext_lazy as __ class EmailAuthForm(forms.Form): email = forms.EmailField(required=True, label=__(u"Email")) password = forms.CharField(label=__("Password"), widget=forms.PasswordInput) def __init__(self, request=None, *args, **kwargs): super(EmailAuthForm, self).__init__(*args, **kwargs) def _authenticate(self): email = self.cleaned_data.get('email') password = self.cleaned_data.get('password') error_messages = { 'invalid_login': _("Please enter a correct %(email)s and password. 
" "Note that both fields may be case-sensitive."), } if email and password: self.user_cache = authenticate(email=email, password=password) if self.user_cache is None: raise forms.ValidationError( error_messages['invalid_login'], code='invalid_login', params={'email': _(u"email")}, ) def get_user(self): return self.user_cache def clean(self): self._authenticate() return self.cleaned_data
... from django.contrib.auth import authenticate from django.utils.translation import ugettext as _, ugettext_lazy as __ ... class EmailAuthForm(forms.Form): email = forms.EmailField(required=True, label=__(u"Email")) password = forms.CharField(label=__("Password"), widget=forms.PasswordInput) ...
8207d86b7b2a6e1f81454eefea4784d89c8674a8
resolver_test/django_test.py
resolver_test/django_test.py
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): self.user = User(username='cherie') self.user.save() self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None usernumber = 0 class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): global usernumber self.user = User.objects.create(username='cherie{}'.format(usernumber)) usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
Use different usernames for each test. by: Glenn, Giles
Use different usernames for each test. by: Glenn, Giles
Python
mit
pythonanywhere/resolver_test
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None + + usernumber = 0 + class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): - self.user = User(username='cherie') - self.user.save() + global usernumber + self.user = User.objects.create(username='cherie{}'.format(usernumber)) + usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
Use different usernames for each test. by: Glenn, Giles
## Code Before: from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): self.user = User(username='cherie') self.user.save() self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') ) ## Instruction: Use different usernames for each test. by: Glenn, Giles ## Code After: from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None usernumber = 0 class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): global usernumber self.user = User.objects.create(username='cherie{}'.format(usernumber)) usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], 
urljoin(settings.LOGIN_URL, '?next=my_path') )
... usernumber = 0 class ResolverViewTestCase(ResolverDjangoTestCase): ... def setUp(self): global usernumber self.user = User.objects.create(username='cherie{}'.format(usernumber)) usernumber += 1 ...
fd061738d025b5371c1415a1f5466bcf5f6476b7
py2deb/config/__init__.py
py2deb/config/__init__.py
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. PKG_REPO = '/tmp/'
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. if os.getuid() == 0: PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' else: PKG_REPO = '/tmp'
Make it work out of the box on the build-server and locally
Make it work out of the box on the build-server and locally
Python
mit
paylogic/py2deb,paylogic/py2deb
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. + if os.getuid() == 0: + PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' + else: - PKG_REPO = '/tmp/' + PKG_REPO = '/tmp'
Make it work out of the box on the build-server and locally
## Code Before: import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. PKG_REPO = '/tmp/' ## Instruction: Make it work out of the box on the build-server and locally ## Code After: import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. if os.getuid() == 0: PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' else: PKG_REPO = '/tmp'
# ... existing code ... # Destination of built packages. if os.getuid() == 0: PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' else: PKG_REPO = '/tmp' # ... rest of the code ...
ebbc68da19755097b2131d60bc9757ecb4dc6d4c
bundles/auth/models/token.py
bundles/auth/models/token.py
import hashlib import random import string from ext.aboard.model import * def set_value(token): """Randomly create and return a value.""" value = str(token.user) + "_" + str(token.timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True, default=set_value)
import hashlib import random import string from ext.aboard.model import * class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True) def __init__(self, user=None, timestamp=None): value = None if user and timestamp: value = Token.get_token_value(user, timestamp) Model.__init__(self, user=user, timestamp=timestamp, value=value) @staticmethod def get_token_value(user, timestamp): """Randomly create and return a token value.""" value = str(user) + "_" + str(timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value
Use the Model constructor to generate a default value
[user] Use the Model constructor to generate a default value
Python
bsd-3-clause
v-legoff/pa-poc2,v-legoff/pa-poc2
import hashlib import random import string from ext.aboard.model import * - - def set_value(token): - """Randomly create and return a value.""" - value = str(token.user) + "_" + str(token.timestamp) - len_rand = random.randint(20, 40) - to_pick = string.digits + string.ascii_letters + \ - "_-+^$" - for i in range(len_rand): - value += random.choice(to_pick) - - print("Private value", value) - # Hash the value - hashed = hashlib.sha512(value.encode()) - value = hashed.hexdigest() - print("Public value", value) - return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() - value = String(pkey=True, default=set_value) + value = String(pkey=True) + + def __init__(self, user=None, timestamp=None): + value = None + if user and timestamp: + value = Token.get_token_value(user, timestamp) + + Model.__init__(self, user=user, timestamp=timestamp, value=value) + + @staticmethod + def get_token_value(user, timestamp): + """Randomly create and return a token value.""" + value = str(user) + "_" + str(timestamp) + len_rand = random.randint(20, 40) + to_pick = string.digits + string.ascii_letters + \ + "_-+^$" + for i in range(len_rand): + value += random.choice(to_pick) + + print("Private value", value) + + # Hash the value + hashed = hashlib.sha512(value.encode()) + value = hashed.hexdigest() + print("Public value", value) + return value
Use the Model constructor to generate a default value
## Code Before: import hashlib import random import string from ext.aboard.model import * def set_value(token): """Randomly create and return a value.""" value = str(token.user) + "_" + str(token.timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True, default=set_value) ## Instruction: Use the Model constructor to generate a default value ## Code After: import hashlib import random import string from ext.aboard.model import * class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True) def __init__(self, user=None, timestamp=None): value = None if user and timestamp: value = Token.get_token_value(user, timestamp) Model.__init__(self, user=user, timestamp=timestamp, value=value) @staticmethod def get_token_value(user, timestamp): """Randomly create and return a token value.""" value = str(user) + "_" + str(timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value
# ... existing code ... from ext.aboard.model import * # ... modified code ... timestamp = Integer() value = String(pkey=True) def __init__(self, user=None, timestamp=None): value = None if user and timestamp: value = Token.get_token_value(user, timestamp) Model.__init__(self, user=user, timestamp=timestamp, value=value) @staticmethod def get_token_value(user, timestamp): """Randomly create and return a token value.""" value = str(user) + "_" + str(timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value # ... rest of the code ...
bb229be50e37bb710c32541cec7b159da9508335
tests/functional/subcommands/test_subcommands.py
tests/functional/subcommands/test_subcommands.py
import subprocess def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
import sys import pytest import subprocess if (3, 6) <= sys.version_info < (3, 8): pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
Add workaround for Travis CI problems
Add workaround for Travis CI problems
Python
mit
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
+ import sys + import pytest import subprocess + + + if (3, 6) <= sys.version_info < (3, 8): + pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
Add workaround for Travis CI problems
## Code Before: import subprocess def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output ## Instruction: Add workaround for Travis CI problems ## Code After: import sys import pytest import subprocess if (3, 6) <= sys.version_info < (3, 8): pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
// ... existing code ... import sys import pytest import subprocess if (3, 6) <= sys.version_info < (3, 8): pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) // ... rest of the code ...
5d5b59bde655fbeb2d07bd5539c2ff9b29879d1d
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close()
def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main()
Update P1_writeCSV.py added docstring and wrapped in main function
Update P1_writeCSV.py added docstring and wrapped in main function
Python
mit
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
- import csv + def main(): + import csv - # Writer Objects - outputFile = open("output.csv", "w", newline='') - outputWriter = csv.writer(outputFile) - print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow([1, 2, 3.141592, 4])) - outputFile.close() + # Writer Objects + outputFile = open("output.csv", "w", newline='') + outputWriter = csv.writer(outputFile) - # Delimiter and lineterminator Keyword Arguments - csvFile = open("example.tsv", 'w', newline='') - csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') - print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) - print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) + print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) - print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) - csvFile.close() + print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) + print(outputWriter.writerow([1, 2, 3.141592, 4])) + outputFile.close() + # Delimiter and lineterminator Keyword Arguments + csvFile = open("example.tsv", 'w', newline='') + csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') + print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) + print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) + print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) + csvFile.close() + + + if __name__ == '__main__': + main() +
Update P1_writeCSV.py added docstring and wrapped in main function
## Code Before: import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() ## Instruction: Update P1_writeCSV.py added docstring and wrapped in main function ## Code After: def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main()
// ... existing code ... def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main() // ... rest of the code ...
301f22b9b2de2a27dd2e3faa27ccb9c70266e938
pybossa/api/project_stats.py
pybossa/api/project_stats.py
from flask import request from pybossa.model.project_stats import ProjectStats from api_base import APIBase class ProjectStatsAPI(APIBase): """Class for domain object ProjectStats.""" __class__ = ProjectStats def _select_attributes(self, stats_data): if request.args.get('full'): return stats_data stats_data['info'].pop('hours_stats', None) stats_data['info'].pop('dates_stats', None) stats_data['info'].pop('users_stats', None) return stats_data
import copy from flask import request from pybossa.model.project_stats import ProjectStats from api_base import APIBase class ProjectStatsAPI(APIBase): """Class for domain object ProjectStats.""" __class__ = ProjectStats def _select_attributes(self, stats_data): if not request.args.get('full'): tmp = copy.deepcopy(stats_data) tmp['info'].pop('hours_stats', None) tmp['info'].pop('dates_stats', None) tmp['info'].pop('users_stats', None) return tmp return stats_data
Fix _select_attributes from project api
Fix _select_attributes from project api
Python
agpl-3.0
PyBossa/pybossa,PyBossa/pybossa,Scifabric/pybossa,Scifabric/pybossa
+ import copy from flask import request from pybossa.model.project_stats import ProjectStats from api_base import APIBase class ProjectStatsAPI(APIBase): """Class for domain object ProjectStats.""" __class__ = ProjectStats def _select_attributes(self, stats_data): - if request.args.get('full'): + if not request.args.get('full'): - return stats_data + tmp = copy.deepcopy(stats_data) - stats_data['info'].pop('hours_stats', None) + tmp['info'].pop('hours_stats', None) - stats_data['info'].pop('dates_stats', None) + tmp['info'].pop('dates_stats', None) - stats_data['info'].pop('users_stats', None) + tmp['info'].pop('users_stats', None) + return tmp return stats_data
Fix _select_attributes from project api
## Code Before: from flask import request from pybossa.model.project_stats import ProjectStats from api_base import APIBase class ProjectStatsAPI(APIBase): """Class for domain object ProjectStats.""" __class__ = ProjectStats def _select_attributes(self, stats_data): if request.args.get('full'): return stats_data stats_data['info'].pop('hours_stats', None) stats_data['info'].pop('dates_stats', None) stats_data['info'].pop('users_stats', None) return stats_data ## Instruction: Fix _select_attributes from project api ## Code After: import copy from flask import request from pybossa.model.project_stats import ProjectStats from api_base import APIBase class ProjectStatsAPI(APIBase): """Class for domain object ProjectStats.""" __class__ = ProjectStats def _select_attributes(self, stats_data): if not request.args.get('full'): tmp = copy.deepcopy(stats_data) tmp['info'].pop('hours_stats', None) tmp['info'].pop('dates_stats', None) tmp['info'].pop('users_stats', None) return tmp return stats_data
// ... existing code ... import copy from flask import request // ... modified code ... def _select_attributes(self, stats_data): if not request.args.get('full'): tmp = copy.deepcopy(stats_data) tmp['info'].pop('hours_stats', None) tmp['info'].pop('dates_stats', None) tmp['info'].pop('users_stats', None) return tmp return stats_data // ... rest of the code ...
014b4905784f50fd13111ca8528fade9be4bd767
skimage/feature/__init__.py
skimage/feature/__init__.py
from ._hog import hog from ._greycomatrix import greycomatrix, greycoprops from .hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern from .peak import peak_local_max from ._harris import harris from .template import match_template
from ._hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern from .peak import peak_local_max from ._harris import harris from .template import match_template
Fix import bug due to rebase
Fix import bug due to rebase
Python
bsd-3-clause
blink1073/scikit-image,robintw/scikit-image,emon10005/scikit-image,newville/scikit-image,michaelaye/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,warmspringwinds/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,youprofit/scikit-image,youprofit/scikit-image,rjeli/scikit-image,michaelpacer/scikit-image,Britefury/scikit-image,SamHames/scikit-image,almarklein/scikit-image,SamHames/scikit-image,bennlich/scikit-image,Hiyorimi/scikit-image,WarrenWeckesser/scikits-image,almarklein/scikit-image,WarrenWeckesser/scikits-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,Midafi/scikit-image,juliusbierk/scikit-image,oew1v07/scikit-image,juliusbierk/scikit-image,ajaybhat/scikit-image,newville/scikit-image,Britefury/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,dpshelio/scikit-image,emmanuelle/scikits.image,jwiggins/scikit-image,bsipocz/scikit-image,ajaybhat/scikit-image,pratapvardhan/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,almarklein/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,ofgulban/scikit-image,keflavich/scikit-image,keflavich/scikit-image,paalge/scikit-image,chintak/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,emon10005/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,emmanuelle/scikits.image,jwiggins/scikit-image,GaZ3ll3/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,SamHames/scikit-image,rjeli/scikit-image,SamHames/scikit-image,emmanuelle/scikits.image,chintak/scikit-image,oew1v07/scikit-image,vighneshbirodkar/scikit-image,paalge/scikit-image
from ._hog import hog - from ._greycomatrix import greycomatrix, greycoprops - from .hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern from .peak import peak_local_max from ._harris import harris from .template import match_template
Fix import bug due to rebase
## Code Before: from ._hog import hog from ._greycomatrix import greycomatrix, greycoprops from .hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern from .peak import peak_local_max from ._harris import harris from .template import match_template ## Instruction: Fix import bug due to rebase ## Code After: from ._hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern from .peak import peak_local_max from ._harris import harris from .template import match_template
# ... existing code ... from ._hog import hog from .texture import greycomatrix, greycoprops, local_binary_pattern # ... rest of the code ...
b9b03c1f736b38d122baafdd57bbd96657de17af
valuenetwork/api/types/apps.py
valuenetwork/api/types/apps.py
from django.apps import AppConfig import valuenetwork.api.types as types class ApiTypesAppConfig(AppConfig): name = 'valuenetwork.api.types' verbose_name = "ApiTypes" def ready(self): #import pdb; pdb.set_trace() from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory types.EconomicResource = EconomicResource types.EconomicResourceCategory = EconomicResourceCategory from valuenetwork.api.types.Agent import Agent types.Agent = Agent from valuenetwork.api.types.Process import Process types.Process = Process from valuenetwork.api.types.EconomicEvent import EconomicEvent types.EconomicEvent = EconomicEvent super(ApiTypesAppConfig, self).ready()
from django.apps import AppConfig import valuenetwork.api.types as types class ApiTypesAppConfig(AppConfig): name = 'valuenetwork.api.types' verbose_name = "ApiTypes" def ready(self): """ Source of this hack: https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py 'Adding from .models import CommentMixin imports CommentMixin so that you can use it inside the ready() method. It does not magically add it to the comment module so that you can access it as comments.CommentMixin You could assign it to the comments module in the ready() method.' from .models import CommentMixin comments.CommentMixin = CommentsMixin """ from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory types.EconomicResource = EconomicResource types.EconomicResourceCategory = EconomicResourceCategory from valuenetwork.api.types.Agent import Agent types.Agent = Agent from valuenetwork.api.types.Process import Process types.Process = Process from valuenetwork.api.types.EconomicEvent import EconomicEvent types.EconomicEvent = EconomicEvent super(ApiTypesAppConfig, self).ready()
Add a comment about the source of the hack
Add a comment about the source of the hack
Python
agpl-3.0
FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork
from django.apps import AppConfig import valuenetwork.api.types as types class ApiTypesAppConfig(AppConfig): name = 'valuenetwork.api.types' verbose_name = "ApiTypes" def ready(self): - #import pdb; pdb.set_trace() + """ Source of this hack: + https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py + 'Adding from .models import CommentMixin imports CommentMixin so that you can use it + inside the ready() method. It does not magically add it to the comment module so that + you can access it as comments.CommentMixin + + You could assign it to the comments module in the ready() method.' + from .models import CommentMixin + comments.CommentMixin = CommentsMixin + """ from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory types.EconomicResource = EconomicResource types.EconomicResourceCategory = EconomicResourceCategory from valuenetwork.api.types.Agent import Agent types.Agent = Agent from valuenetwork.api.types.Process import Process types.Process = Process from valuenetwork.api.types.EconomicEvent import EconomicEvent types.EconomicEvent = EconomicEvent super(ApiTypesAppConfig, self).ready()
Add a comment about the source of the hack
## Code Before: from django.apps import AppConfig import valuenetwork.api.types as types class ApiTypesAppConfig(AppConfig): name = 'valuenetwork.api.types' verbose_name = "ApiTypes" def ready(self): #import pdb; pdb.set_trace() from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory types.EconomicResource = EconomicResource types.EconomicResourceCategory = EconomicResourceCategory from valuenetwork.api.types.Agent import Agent types.Agent = Agent from valuenetwork.api.types.Process import Process types.Process = Process from valuenetwork.api.types.EconomicEvent import EconomicEvent types.EconomicEvent = EconomicEvent super(ApiTypesAppConfig, self).ready() ## Instruction: Add a comment about the source of the hack ## Code After: from django.apps import AppConfig import valuenetwork.api.types as types class ApiTypesAppConfig(AppConfig): name = 'valuenetwork.api.types' verbose_name = "ApiTypes" def ready(self): """ Source of this hack: https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py 'Adding from .models import CommentMixin imports CommentMixin so that you can use it inside the ready() method. It does not magically add it to the comment module so that you can access it as comments.CommentMixin You could assign it to the comments module in the ready() method.' from .models import CommentMixin comments.CommentMixin = CommentsMixin """ from valuenetwork.api.types.EconomicResource import EconomicResource, EconomicResourceCategory types.EconomicResource = EconomicResource types.EconomicResourceCategory = EconomicResourceCategory from valuenetwork.api.types.Agent import Agent types.Agent = Agent from valuenetwork.api.types.Process import Process types.Process = Process from valuenetwork.api.types.EconomicEvent import EconomicEvent types.EconomicEvent = EconomicEvent super(ApiTypesAppConfig, self).ready()
... def ready(self): """ Source of this hack: https://stackoverflow.com/questions/37862725/django-1-9-how-to-import-in-init-py 'Adding from .models import CommentMixin imports CommentMixin so that you can use it inside the ready() method. It does not magically add it to the comment module so that you can access it as comments.CommentMixin You could assign it to the comments module in the ready() method.' from .models import CommentMixin comments.CommentMixin = CommentsMixin """ ...
1a10f21566f59c9f4f8171bc088af1e2a18d9702
prestoadmin/_version.py
prestoadmin/_version.py
"""Version information""" # This must be the last line in the file and the format must be maintained # even when the version is changed __version__ = '2.3'
"""Version information""" # This must be the last line in the file and the format must be maintained # even when the version is changed __version__ = '2.4-SNAPSHOT'
Prepare for the next development iteration
Prepare for the next development iteration
Python
apache-2.0
prestodb/presto-admin,prestodb/presto-admin
"""Version information""" # This must be the last line in the file and the format must be maintained # even when the version is changed - __version__ = '2.3' + __version__ = '2.4-SNAPSHOT'
Prepare for the next development iteration
## Code Before: """Version information""" # This must be the last line in the file and the format must be maintained # even when the version is changed __version__ = '2.3' ## Instruction: Prepare for the next development iteration ## Code After: """Version information""" # This must be the last line in the file and the format must be maintained # even when the version is changed __version__ = '2.4-SNAPSHOT'
... # even when the version is changed __version__ = '2.4-SNAPSHOT' ...
3f3818e4a21ffc4e1b8d4426093fc093396b5a5b
pandas_finance.py
pandas_finance.py
import datetime import scraperwiki import numpy import pandas.io.data as web def get_stock(stock, start, end, service): """ Return data frame of finance data for stock. Takes start and end datetimes, and service name of 'google' or 'yahoo'. """ return web.DataReader(stock, service, start, end) def parse_finance_frame(stock, start, end, service='google'): """ Return rows of dicts from a finance data frame for scraperwiki.sqlite. service can also be 'yahoo', start and end are datetimes. """ frame = get_stock(stock, start, end, service) rows = [] for idx in range(len(frame)): current_row_as_dict = frame.ix[idx].to_dict() # have to convert dates because these are Pandas timestamps and # dumptruck doesn't support them current_row_as_dict['Date'] = frame.index[idx].to_datetime() current_row_as_dict['Stock'] = stock # horrible hack because data values are numpy.float64 and dumptruck # doesn't support them for key in current_row_as_dict: if isinstance(current_row_as_dict[key], numpy.float64): current_row_as_dict[key] = float(current_row_as_dict[key]) rows.append(current_row_as_dict) return rows def main(): """ Dump stock data into scraperwiki.sqlite using pandas.io.data. """ # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() stock_list = ['TWTR', 'FB'] rows = [] for stock in stock_list: rows.extend(parse_finance_frame(stock, start, end)) scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) if __name__ == '__main__': main()
import datetime import sqlite3 import pandas.io.data as web import pandas.io.sql as sql def get_stock(stock, start, end): """ Return data frame of Yahoo Finance data for stock. Takes start and end datetimes. """ return web.DataReader(stock, 'yahoo', start, end) def scrape_stock(stock, start, end): sqlite_db.execute("drop table if exists {};".format(stock)) frame = (get_stock(stock, start, end)) # make Date not an index so it appears in table frame = frame.reset_index() # force Date datetime to string frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) sql.write_frame(frame, stock, sqlite_db) def main(): global sqlite_db sqlite_db = sqlite3.connect("scraperwiki.sqlite") start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() for ticker in ['TWTR', 'FB']: scrape_stock(ticker, start, end) if __name__ == '__main__': main()
Use pandas native saving by forcing date to not be index, and be string
Use pandas native saving by forcing date to not be index, and be string
Python
agpl-3.0
scraperwiki/stock-tool,scraperwiki/stock-tool
import datetime + import sqlite3 - - import scraperwiki - import numpy import pandas.io.data as web + import pandas.io.sql as sql - def get_stock(stock, start, end, service): + def get_stock(stock, start, end): """ - Return data frame of finance data for stock. + Return data frame of Yahoo Finance data for stock. - Takes start and end datetimes, and service name of 'google' or 'yahoo'. + Takes start and end datetimes. """ - return web.DataReader(stock, service, start, end) + return web.DataReader(stock, 'yahoo', start, end) + def scrape_stock(stock, start, end): + sqlite_db.execute("drop table if exists {};".format(stock)) - - def parse_finance_frame(stock, start, end, service='google'): - """ - Return rows of dicts from a finance data frame for scraperwiki.sqlite. - - service can also be 'yahoo', start and end are datetimes. - """ - frame = get_stock(stock, start, end, service) + frame = (get_stock(stock, start, end)) + # make Date not an index so it appears in table + frame = frame.reset_index() + # force Date datetime to string + frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) + sql.write_frame(frame, stock, sqlite_db) - rows = [] - for idx in range(len(frame)): - current_row_as_dict = frame.ix[idx].to_dict() - # have to convert dates because these are Pandas timestamps and - # dumptruck doesn't support them - current_row_as_dict['Date'] = frame.index[idx].to_datetime() - current_row_as_dict['Stock'] = stock - # horrible hack because data values are numpy.float64 and dumptruck - # doesn't support them - for key in current_row_as_dict: - if isinstance(current_row_as_dict[key], numpy.float64): - current_row_as_dict[key] = float(current_row_as_dict[key]) - rows.append(current_row_as_dict) - return rows - def main(): + global sqlite_db + sqlite_db = sqlite3.connect("scraperwiki.sqlite") - """ - Dump stock data into scraperwiki.sqlite using pandas.io.data. 
- """ - # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() + for ticker in ['TWTR', 'FB']: + scrape_stock(ticker, start, end) + - - stock_list = ['TWTR', 'FB'] - rows = [] - for stock in stock_list: - rows.extend(parse_finance_frame(stock, start, end)) - scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) - if __name__ == '__main__': main()
Use pandas native saving by forcing date to not be index, and be string
## Code Before: import datetime import scraperwiki import numpy import pandas.io.data as web def get_stock(stock, start, end, service): """ Return data frame of finance data for stock. Takes start and end datetimes, and service name of 'google' or 'yahoo'. """ return web.DataReader(stock, service, start, end) def parse_finance_frame(stock, start, end, service='google'): """ Return rows of dicts from a finance data frame for scraperwiki.sqlite. service can also be 'yahoo', start and end are datetimes. """ frame = get_stock(stock, start, end, service) rows = [] for idx in range(len(frame)): current_row_as_dict = frame.ix[idx].to_dict() # have to convert dates because these are Pandas timestamps and # dumptruck doesn't support them current_row_as_dict['Date'] = frame.index[idx].to_datetime() current_row_as_dict['Stock'] = stock # horrible hack because data values are numpy.float64 and dumptruck # doesn't support them for key in current_row_as_dict: if isinstance(current_row_as_dict[key], numpy.float64): current_row_as_dict[key] = float(current_row_as_dict[key]) rows.append(current_row_as_dict) return rows def main(): """ Dump stock data into scraperwiki.sqlite using pandas.io.data. """ # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() stock_list = ['TWTR', 'FB'] rows = [] for stock in stock_list: rows.extend(parse_finance_frame(stock, start, end)) scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) if __name__ == '__main__': main() ## Instruction: Use pandas native saving by forcing date to not be index, and be string ## Code After: import datetime import sqlite3 import pandas.io.data as web import pandas.io.sql as sql def get_stock(stock, start, end): """ Return data frame of Yahoo Finance data for stock. Takes start and end datetimes. 
""" return web.DataReader(stock, 'yahoo', start, end) def scrape_stock(stock, start, end): sqlite_db.execute("drop table if exists {};".format(stock)) frame = (get_stock(stock, start, end)) # make Date not an index so it appears in table frame = frame.reset_index() # force Date datetime to string frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) sql.write_frame(frame, stock, sqlite_db) def main(): global sqlite_db sqlite_db = sqlite3.connect("scraperwiki.sqlite") start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() for ticker in ['TWTR', 'FB']: scrape_stock(ticker, start, end) if __name__ == '__main__': main()
... import datetime import sqlite3 import pandas.io.data as web import pandas.io.sql as sql ... def get_stock(stock, start, end): """ Return data frame of Yahoo Finance data for stock. Takes start and end datetimes. """ return web.DataReader(stock, 'yahoo', start, end) def scrape_stock(stock, start, end): sqlite_db.execute("drop table if exists {};".format(stock)) frame = (get_stock(stock, start, end)) # make Date not an index so it appears in table frame = frame.reset_index() # force Date datetime to string frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) sql.write_frame(frame, stock, sqlite_db) ... def main(): global sqlite_db sqlite_db = sqlite3.connect("scraperwiki.sqlite") start = datetime.datetime(2014, 3, 1) ... end = datetime.datetime.today() for ticker in ['TWTR', 'FB']: scrape_stock(ticker, start, end) if __name__ == '__main__': ...