{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); '\n\t\tprint(html_source)"},"new_contents":{"kind":"string","value":"import sublime, sublime_plugin\nimport os\nimport re\nfrom subprocess import call\n\nfrom .mistune import markdown\n\n\nclass CreateMinuteCommand(sublime_plugin.TextCommand):\n\tdef run(self, edit):\n\t\tregion = sublime.Region(0, self.view.size())\n\t\tmd_source = self.view.substr(region)\n\t\tmd_source.encode(encoding='UTF-8',errors='strict')\n\t\thtml_source = '' + markdown(md_source) + ''\n\t\t\n\t\tfile_name = self.view.file_name()\n\t\thtml_file, extension = os.path.splitext(file_name)\n\t\thtml_file += \".html\"\n\t\twith open(html_file, 'w+') as 
file_:\n\t\t\tfile_.write(html_source)\n\n\t\tprint(file_name)\n\t\tprint(html_file)"},"subject":{"kind":"string","value":"Save the created html in a HTML file."},"message":{"kind":"string","value":"Save the created html in a HTML file.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Txarli/sublimetext-meeting-minutes,Txarli/sublimetext-meeting-minutes"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sublime, sublime_plugin\nfrom .mistune import markdown\n\nclass CreateMinuteCommand(sublime_plugin.TextCommand):\n\tdef run(self, edit):\n\t\tregion = sublime.Region(0, self.view.size())\n\t\tmd_source = self.view.substr(region)\n\t\tmd_source.encode(encoding='UTF-8',errors='strict')\n\t\thtml_source = '' + markdown(md_source) + ''\n\t\tprint(html_source)\n## Instruction:\nSave the created html in a HTML file.\n\n## Code After:\nimport sublime, sublime_plugin\nimport os\nimport re\nfrom subprocess import call\n\nfrom .mistune import markdown\n\n\nclass CreateMinuteCommand(sublime_plugin.TextCommand):\n\tdef run(self, edit):\n\t\tregion = sublime.Region(0, self.view.size())\n\t\tmd_source = self.view.substr(region)\n\t\tmd_source.encode(encoding='UTF-8',errors='strict')\n\t\thtml_source = '' + markdown(md_source) + ''\n\t\t\n\t\tfile_name = self.view.file_name()\n\t\thtml_file, extension = os.path.splitext(file_name)\n\t\thtml_file += \".html\"\n\t\twith open(html_file, 'w+') as file_:\n\t\t\tfile_.write(html_source)\n\n\t\tprint(file_name)\n\t\tprint(html_file)"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport sublime, sublime_plugin\nimport os\nimport re\nfrom subprocess import call\n\nfrom .mistune import markdown\n\n\nclass CreateMinuteCommand(sublime_plugin.TextCommand):\n\tdef run(self, edit):\n\n\n ... 
\n\n\n\t\tmd_source = self.view.substr(region)\n\t\tmd_source.encode(encoding='UTF-8',errors='strict')\n\t\thtml_source = '' + markdown(md_source) + ''\n\t\t\n\t\tfile_name = self.view.file_name()\n\t\thtml_file, extension = os.path.splitext(file_name)\n\t\thtml_file += \".html\"\n\t\twith open(html_file, 'w+') as file_:\n\t\t\tfile_.write(html_source)\n\n\t\tprint(file_name)\n\t\tprint(html_file)\n\n\n ... "}}},{"rowIdx":2150,"cells":{"commit":{"kind":"string","value":"993c4c98fb9529946669b4d13e6c5a9ff4ab3f67"},"old_file":{"kind":"string","value":"tests/test_mpi.py"},"new_file":{"kind":"string","value":"tests/test_mpi.py"},"old_contents":{"kind":"string","value":"from mpi4py import MPI\n\nimport pytest\n\nfrom devito import Grid, Function, Distributor\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_hello_mpi():\n size = MPI.COMM_WORLD.Get_size()\n rank = MPI.COMM_WORLD.Get_rank()\n name = MPI.Get_processor_name()\n\n print(\"Hello, World! I am rank %d of %d on %s\" % (rank, size, name), flush=True)\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_basic_partitioning():\n grid = Grid(shape=(10, 10, 10)) # Gonna use a default distributor underneath\n f = Function(name='f', grid=grid)\n from IPython import embed; embed()\n"},"new_contents":{"kind":"string","value":"from mpi4py import MPI\n\nimport pytest\n\nfrom devito import Grid, Function\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_hello_mpi():\n size = MPI.COMM_WORLD.Get_size()\n rank = MPI.COMM_WORLD.Get_rank()\n name = MPI.Get_processor_name()\n\n print(\"Hello, World! 
I am rank %d of %d on %s\" % (rank, size, name), flush=True)\n\n\n@pytest.mark.parallel(nprocs=[2, 4])\ndef test_basic_partitioning():\n grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath\n f = Function(name='f', grid=grid)\n\n distributor = grid._distributor\n expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...]\n 2: [(8, 15), (7, 15)],\n 4: [(8, 8), (8, 7), (7, 8), (7, 7)]\n }\n assert f.shape == expected[distributor.nprocs][distributor.rank]\n"},"subject":{"kind":"string","value":"Check domain decomposition over Functions"},"message":{"kind":"string","value":"tests: Check domain decomposition over Functions\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"opesci/devito,opesci/devito"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom mpi4py import MPI\n\nimport pytest\n\nfrom devito import Grid, Function, Distributor\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_hello_mpi():\n size = MPI.COMM_WORLD.Get_size()\n rank = MPI.COMM_WORLD.Get_rank()\n name = MPI.Get_processor_name()\n\n print(\"Hello, World! I am rank %d of %d on %s\" % (rank, size, name), flush=True)\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_basic_partitioning():\n grid = Grid(shape=(10, 10, 10)) # Gonna use a default distributor underneath\n f = Function(name='f', grid=grid)\n from IPython import embed; embed()\n\n## Instruction:\ntests: Check domain decomposition over Functions\n\n## Code After:\nfrom mpi4py import MPI\n\nimport pytest\n\nfrom devito import Grid, Function\n\n\n@pytest.mark.parallel(nprocs=2)\ndef test_hello_mpi():\n size = MPI.COMM_WORLD.Get_size()\n rank = MPI.COMM_WORLD.Get_rank()\n name = MPI.Get_processor_name()\n\n print(\"Hello, World! 
I am rank %d of %d on %s\" % (rank, size, name), flush=True)\n\n\n@pytest.mark.parallel(nprocs=[2, 4])\ndef test_basic_partitioning():\n grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath\n f = Function(name='f', grid=grid)\n\n distributor = grid._distributor\n expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...]\n 2: [(8, 15), (7, 15)],\n 4: [(8, 8), (8, 7), (7, 8), (7, 7)]\n }\n assert f.shape == expected[distributor.nprocs][distributor.rank]\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nimport pytest\n\nfrom devito import Grid, Function\n\n\n@pytest.mark.parallel(nprocs=2)\n\n\n// ... modified code ... \n\n\n print(\"Hello, World! I am rank %d of %d on %s\" % (rank, size, name), flush=True)\n\n\n@pytest.mark.parallel(nprocs=[2, 4])\ndef test_basic_partitioning():\n grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath\n f = Function(name='f', grid=grid)\n\n distributor = grid._distributor\n expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...]\n 2: [(8, 15), (7, 15)],\n 4: [(8, 8), (8, 7), (7, 8), (7, 7)]\n }\n assert f.shape == expected[distributor.nprocs][distributor.rank]\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2151,"cells":{"commit":{"kind":"string","value":"fc94d60066692e6e8dc496bb854039bb66af3311"},"old_file":{"kind":"string","value":"scout.py"},"new_file":{"kind":"string","value":"scout.py"},"old_contents":{"kind":"string","value":"class Problem:\n def getStartState(self):\n return None\n\n def getEndState(self):\n return None\n\n def isValidState(self, state):\n return False\n\n def getSuccessors(self, state):\n return []\n\n def getStringRepr(self, state):\n return \"BadProblem\"\n\n\ndef search(problem):\n print \"Searching...\"\n\n\n\nif (__name__ == '__main__'):\n problem = Problem();\n search(problem)\n"},"new_contents":{"kind":"string","value":"class Problem:\n def getStartState(self):\n return None\n\n def getEndState(self):\n return None\n\n def isValidState(self, state):\n return False\n\n def getSuccessors(self, state):\n return []\n\n def getStringRepr(self, state):\n return \"BadProblem\"\n\n\nclass SquareProblem(Problem):\n def __init__(self, size):\n self.size = size\n\n def getStartState(self):\n return (0, 0)\n\n def getEndState(self):\n return (self.size, self.size)\n\n def isValidState(self, state):\n return 0 <= state[0] <= self.size and\n 0 <= state[1] <= self.size\n\n def getSuccessors(self, state):\n return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]]\n\n def getStringRepr(self, state):\n return \"(%d, %d)\" % state\n\n\ndef search(problem):\n print \"Searching...\"\n\n\n\nif (__name__ == '__main__'):\n problem = SquareProblem(2);\n search(problem)\n"},"subject":{"kind":"string","value":"Add a simple problem for testing"},"message":{"kind":"string","value":"Add a simple problem for testing\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"SpexGuy/Scout"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nclass Problem:\n def getStartState(self):\n return 
None\n\n def getEndState(self):\n return None\n\n def isValidState(self, state):\n return False\n\n def getSuccessors(self, state):\n return []\n\n def getStringRepr(self, state):\n return \"BadProblem\"\n\n\ndef search(problem):\n print \"Searching...\"\n\n\n\nif (__name__ == '__main__'):\n problem = Problem();\n search(problem)\n\n## Instruction:\nAdd a simple problem for testing\n\n## Code After:\nclass Problem:\n def getStartState(self):\n return None\n\n def getEndState(self):\n return None\n\n def isValidState(self, state):\n return False\n\n def getSuccessors(self, state):\n return []\n\n def getStringRepr(self, state):\n return \"BadProblem\"\n\n\nclass SquareProblem(Problem):\n def __init__(self, size):\n self.size = size\n\n def getStartState(self):\n return (0, 0)\n\n def getEndState(self):\n return (self.size, self.size)\n\n def isValidState(self, state):\n return 0 <= state[0] <= self.size and\n 0 <= state[1] <= self.size\n\n def getSuccessors(self, state):\n return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]]\n\n def getStringRepr(self, state):\n return \"(%d, %d)\" % state\n\n\ndef search(problem):\n print \"Searching...\"\n\n\n\nif (__name__ == '__main__'):\n problem = SquareProblem(2);\n search(problem)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n return \"BadProblem\"\n\n\nclass SquareProblem(Problem):\n def __init__(self, size):\n self.size = size\n\n def getStartState(self):\n return (0, 0)\n\n def getEndState(self):\n return (self.size, self.size)\n\n def isValidState(self, state):\n return 0 <= state[0] <= self.size and\n 0 <= state[1] <= self.size\n\n def getSuccessors(self, state):\n return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]]\n\n def getStringRepr(self, state):\n return \"(%d, %d)\" % state\n\n\ndef search(problem):\n print \"Searching...\"\n\n\n\n ... \n\n\n\n\nif (__name__ == '__main__'):\n problem = SquareProblem(2);\n search(problem)\n\n\n ... 
"}}},{"rowIdx":2152,"cells":{"commit":{"kind":"string","value":"cdc43f6f6ee2d040675f10028af6372b0bf42a08"},"old_file":{"kind":"string","value":"msmbuilder/tests/__init__.py"},"new_file":{"kind":"string","value":"msmbuilder/tests/__init__.py"},"old_contents":{"kind":"string","value":"import sys\nimport warnings\nfrom warnings import warn as orig_warn\n\n\ndef my_warn(message, category=None, stacklevel=1):\n # taken from warnings module\n # Get context information\n try:\n caller = sys._getframe(stacklevel)\n except ValueError:\n globals = sys.__dict__\n lineno = 1\n else:\n globals = caller.f_globals\n lineno = caller.f_lineno\n module = globals['__name__']\n filename = globals.get('__file__')\n\n m = {\n 'argspec': 'inspect.getargspec() is deprecated'\n }\n\n if module == 'scipy._lib.decorator' and m['argspec'] in message:\n return\n\n if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:\n return\n\n if module == 'statsmodels.base.wrapper' and m['argspec'] in message:\n return\n\n if module == 'nose.util' and m['argspec'] in message:\n return\n\n print(\"Warning: module: \", module)\n print(\"Warning: message: \", message)\n return orig_warn(message=message, category=category,\n stacklevel=stacklevel + 1)\n\n\nwarnings.warn = my_warn\n"},"new_contents":{"kind":"string","value":"import sys\nimport warnings\nfrom warnings import warn as orig_warn\n\n\ndef my_warn(message, category=None, stacklevel=1):\n # taken from warnings module\n # Get context information\n try:\n caller = sys._getframe(stacklevel)\n except ValueError:\n globals = sys.__dict__\n lineno = 1\n else:\n globals = caller.f_globals\n lineno = caller.f_lineno\n module = globals['__name__']\n filename = globals.get('__file__')\n\n m = {\n 'argspec': 'inspect.getargspec() is deprecated'\n }\n\n if module == 'scipy._lib.decorator' and m['argspec'] in message:\n return\n\n if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:\n return\n\n if module == 'statsmodels.base.wrapper' and 
m['argspec'] in message:\n return\n\n if module == 'nose.util' and m['argspec'] in message:\n return\n\n print(\"Warning: module: \", module)\n print(\"Warning: message: \", message)\n\n # This explicit check is necessary for python < 3.5 maybe??\n if category is None:\n category = UserWarning\n\n return orig_warn(message=message, category=category,\n stacklevel=stacklevel + 1)\n\n\nwarnings.warn = my_warn\n"},"subject":{"kind":"string","value":"Fix for my nefarious `warn` replacement"},"message":{"kind":"string","value":"Fix for my nefarious `warn` replacement\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"lgpl-2.1"},"repos":{"kind":"string","value":"dr-nate/msmbuilder,brookehus/msmbuilder,cxhernandez/msmbuilder,msmbuilder/msmbuilder,dr-nate/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,dr-nate/msmbuilder,msultan/msmbuilder,msmbuilder/msmbuilder,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,mpharrigan/mixtape,mpharrigan/mixtape,msmbuilder/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,stephenliu1989/msmbuilder,brookehus/msmbuilder,mpharrigan/mixtape,dr-nate/msmbuilder,cxhernandez/msmbuilder,mpharrigan/mixtape,peastman/msmbuilder,stephenliu1989/msmbuilder,Eigenstate/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sys\nimport warnings\nfrom warnings import warn as orig_warn\n\n\ndef my_warn(message, category=None, stacklevel=1):\n # taken from warnings module\n # 
Get context information\n try:\n caller = sys._getframe(stacklevel)\n except ValueError:\n globals = sys.__dict__\n lineno = 1\n else:\n globals = caller.f_globals\n lineno = caller.f_lineno\n module = globals['__name__']\n filename = globals.get('__file__')\n\n m = {\n 'argspec': 'inspect.getargspec() is deprecated'\n }\n\n if module == 'scipy._lib.decorator' and m['argspec'] in message:\n return\n\n if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:\n return\n\n if module == 'statsmodels.base.wrapper' and m['argspec'] in message:\n return\n\n if module == 'nose.util' and m['argspec'] in message:\n return\n\n print(\"Warning: module: \", module)\n print(\"Warning: message: \", message)\n return orig_warn(message=message, category=category,\n stacklevel=stacklevel + 1)\n\n\nwarnings.warn = my_warn\n\n## Instruction:\nFix for my nefarious `warn` replacement\n\n## Code After:\nimport sys\nimport warnings\nfrom warnings import warn as orig_warn\n\n\ndef my_warn(message, category=None, stacklevel=1):\n # taken from warnings module\n # Get context information\n try:\n caller = sys._getframe(stacklevel)\n except ValueError:\n globals = sys.__dict__\n lineno = 1\n else:\n globals = caller.f_globals\n lineno = caller.f_lineno\n module = globals['__name__']\n filename = globals.get('__file__')\n\n m = {\n 'argspec': 'inspect.getargspec() is deprecated'\n }\n\n if module == 'scipy._lib.decorator' and m['argspec'] in message:\n return\n\n if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:\n return\n\n if module == 'statsmodels.base.wrapper' and m['argspec'] in message:\n return\n\n if module == 'nose.util' and m['argspec'] in message:\n return\n\n print(\"Warning: module: \", module)\n print(\"Warning: message: \", message)\n\n # This explicit check is necessary for python < 3.5 maybe??\n if category is None:\n category = UserWarning\n\n return orig_warn(message=message, category=category,\n stacklevel=stacklevel + 1)\n\n\nwarnings.warn = 
my_warn\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n print(\"Warning: module: \", module)\n print(\"Warning: message: \", message)\n\n # This explicit check is necessary for python < 3.5 maybe??\n if category is None:\n category = UserWarning\n\n return orig_warn(message=message, category=category,\n stacklevel=stacklevel + 1)\n\n\n\n// ... rest of the code ..."}}},{"rowIdx":2153,"cells":{"commit":{"kind":"string","value":"35d80ac6af0a546f138f6db31511e9dade7aae8e"},"old_file":{"kind":"string","value":"feder/es_search/queries.py"},"new_file":{"kind":"string","value":"feder/es_search/queries.py"},"old_contents":{"kind":"string","value":"from elasticsearch_dsl import Search, Index\nfrom elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis\nfrom elasticsearch_dsl.connections import get_connection, connections\nfrom .documents import LetterDocument\n\n\ndef serialize_document(doc):\n return {\n \"_id\": doc.__dict__[\"meta\"][\"id\"],\n \"_index\": doc.__dict__[\"meta\"][\"index\"],\n }\n\n\ndef search_keywords(query):\n q = MultiMatch(query=query, fields=[\"title\", \"body\", \"content\"])\n return LetterDocument.search().query(q).execute()\n\n\ndef more_like_this(doc):\n like = serialize_document(doc)\n q = MoreLikeThis(like=like, fields=[\"title\", \"body\"],)\n query = LetterDocument.search().query(q)\n print(query.to_dict())\n x = query.execute()\n print(x)\n return x\n"},"new_contents":{"kind":"string","value":"from elasticsearch_dsl import Search, Index\nfrom elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis\nfrom elasticsearch_dsl.connections import get_connection, connections\nfrom .documents import LetterDocument\n\n\ndef serialize_document(doc):\n return {\n \"_id\": doc.__dict__[\"meta\"][\"id\"],\n \"_index\": doc.__dict__[\"meta\"][\"index\"],\n }\n\n\ndef search_keywords(query):\n q = MultiMatch(query=query, fields=[\"title\", \"body\", \"content\"])\n return 
LetterDocument.search().query(q).execute()\n\n\ndef more_like_this(doc):\n like = serialize_document(doc)\n q = MoreLikeThis(like=like, fields=[\"title\", \"body\"],)\n query = LetterDocument.search().query(q)\n # print(query.to_dict())\n return query.execute()\n"},"subject":{"kind":"string","value":"Reduce debug logging in more_like_this"},"message":{"kind":"string","value":"Reduce debug logging in more_like_this\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom elasticsearch_dsl import Search, Index\nfrom elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis\nfrom elasticsearch_dsl.connections import get_connection, connections\nfrom .documents import LetterDocument\n\n\ndef serialize_document(doc):\n return {\n \"_id\": doc.__dict__[\"meta\"][\"id\"],\n \"_index\": doc.__dict__[\"meta\"][\"index\"],\n }\n\n\ndef search_keywords(query):\n q = MultiMatch(query=query, fields=[\"title\", \"body\", \"content\"])\n return LetterDocument.search().query(q).execute()\n\n\ndef more_like_this(doc):\n like = serialize_document(doc)\n q = MoreLikeThis(like=like, fields=[\"title\", \"body\"],)\n query = LetterDocument.search().query(q)\n print(query.to_dict())\n x = query.execute()\n print(x)\n return x\n\n## Instruction:\nReduce debug logging in more_like_this\n\n## Code After:\nfrom elasticsearch_dsl import Search, Index\nfrom elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis\nfrom elasticsearch_dsl.connections import get_connection, connections\nfrom .documents import LetterDocument\n\n\ndef serialize_document(doc):\n return {\n \"_id\": doc.__dict__[\"meta\"][\"id\"],\n \"_index\": doc.__dict__[\"meta\"][\"index\"],\n }\n\n\ndef search_keywords(query):\n q = MultiMatch(query=query, 
fields=[\"title\", \"body\", \"content\"])\n return LetterDocument.search().query(q).execute()\n\n\ndef more_like_this(doc):\n like = serialize_document(doc)\n q = MoreLikeThis(like=like, fields=[\"title\", \"body\"],)\n query = LetterDocument.search().query(q)\n # print(query.to_dict())\n return query.execute()\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n like = serialize_document(doc)\n q = MoreLikeThis(like=like, fields=[\"title\", \"body\"],)\n query = LetterDocument.search().query(q)\n # print(query.to_dict())\n return query.execute()\n\n\n ... "}}},{"rowIdx":2154,"cells":{"commit":{"kind":"string","value":"b2bc77023ed3e19f6f7483645e2a11952c061de0"},"old_file":{"kind":"string","value":"tests/registryd/test_registry_startup.py"},"new_file":{"kind":"string","value":"tests/registryd/test_registry_startup.py"},"old_contents":{"kind":"string","value":"PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'\nACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'\n\ndef test_accessible_iface_properties(registry, session_manager):\n val = registry.Get(ACCESSIBLE_IFACE, 'Name', dbus_interface=PROPERTIES_IFACE)\n assert str(val) == 'main'\n"},"new_contents":{"kind":"string","value":"PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'\nACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'\n\ndef get_property(proxy, iface_name, prop_name):\n return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)\n\ndef test_accessible_iface_properties(registry, session_manager):\n values = [\n ('Name', 'main'),\n ('Description', ''),\n ]\n\n for prop_name, expected in values:\n assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected\n"},"subject":{"kind":"string","value":"Test the Description property of the registry's root"},"message":{"kind":"string","value":"Test the Description property of the registry's 
root\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"lgpl-2.1"},"repos":{"kind":"string","value":"GNOME/at-spi2-core,GNOME/at-spi2-core,GNOME/at-spi2-core"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nPROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'\nACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'\n\ndef test_accessible_iface_properties(registry, session_manager):\n val = registry.Get(ACCESSIBLE_IFACE, 'Name', dbus_interface=PROPERTIES_IFACE)\n assert str(val) == 'main'\n\n## Instruction:\nTest the Description property of the registry's root\n\n## Code After:\nPROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'\nACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'\n\ndef get_property(proxy, iface_name, prop_name):\n return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)\n\ndef test_accessible_iface_properties(registry, session_manager):\n values = [\n ('Name', 'main'),\n ('Description', ''),\n ]\n\n for prop_name, expected in values:\n assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nPROPERTIES_IFACE = 'org.freedesktop.DBus.Properties'\nACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible'\n\ndef get_property(proxy, iface_name, prop_name):\n return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE)\n\ndef test_accessible_iface_properties(registry, session_manager):\n values = [\n ('Name', 'main'),\n ('Description', ''),\n ]\n\n for prop_name, expected in values:\n assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected\n\n\n// ... rest of the code ..."}}},{"rowIdx":2155,"cells":{"commit":{"kind":"string","value":"e9fc291faca8af35398b958d046e951aa8471cbf"},"old_file":{"kind":"string","value":"apps/core/tests/test_factories.py"},"new_file":{"kind":"string","value":"apps/core/tests/test_factories.py"},"old_contents":{"kind":"string","value":"from .. 
import factories\nfrom . import CoreFixturesTestCase\n\n\nclass AnalysisFactoryTestCase(CoreFixturesTestCase):\n\n def test_new_factory_with_Experiments(self):\n\n experiments = factories.ExperimentFactory.create_batch(3)\n\n # build\n analysis = factories.AnalysisFactory.build(experiments=experiments)\n self.assertEqual(analysis.experiments.count(), 0)\n\n # create\n analysis = factories.AnalysisFactory(experiments=experiments)\n\n experiments_ids = list(\n analysis.experiments.values_list('id', flat=True)\n )\n expected_experiments_ids = [e.id for e in experiments]\n self.assertEqual(experiments_ids, expected_experiments_ids)\n"},"new_contents":{"kind":"string","value":"from .. import factories, models\nfrom . import CoreFixturesTestCase\n\n\nclass AnalysisFactoryTestCase(CoreFixturesTestCase):\n\n def test_new_factory_with_Experiments(self):\n\n experiments = factories.ExperimentFactory.create_batch(3)\n\n # build\n analysis = factories.AnalysisFactory.build(experiments=experiments)\n self.assertEqual(analysis.experiments.count(), 0)\n\n # create\n analysis = factories.AnalysisFactory(experiments=experiments)\n\n experiments_ids = analysis.experiments.values_list(\n 'id', flat=True\n )\n expected_experiments_ids = models.Experiment.objects.values_list(\n 'id', flat=True\n )\n self.assertEqual(\n list(experiments_ids),\n list(expected_experiments_ids)\n )\n"},"subject":{"kind":"string","value":"Fix broken test since models new default ordering"},"message":{"kind":"string","value":"Fix broken test since models new default ordering\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom .. import factories\nfrom . 
import CoreFixturesTestCase\n\n\nclass AnalysisFactoryTestCase(CoreFixturesTestCase):\n\n def test_new_factory_with_Experiments(self):\n\n experiments = factories.ExperimentFactory.create_batch(3)\n\n # build\n analysis = factories.AnalysisFactory.build(experiments=experiments)\n self.assertEqual(analysis.experiments.count(), 0)\n\n # create\n analysis = factories.AnalysisFactory(experiments=experiments)\n\n experiments_ids = list(\n analysis.experiments.values_list('id', flat=True)\n )\n expected_experiments_ids = [e.id for e in experiments]\n self.assertEqual(experiments_ids, expected_experiments_ids)\n\n## Instruction:\nFix broken test since models new default ordering\n\n## Code After:\nfrom .. import factories, models\nfrom . import CoreFixturesTestCase\n\n\nclass AnalysisFactoryTestCase(CoreFixturesTestCase):\n\n def test_new_factory_with_Experiments(self):\n\n experiments = factories.ExperimentFactory.create_batch(3)\n\n # build\n analysis = factories.AnalysisFactory.build(experiments=experiments)\n self.assertEqual(analysis.experiments.count(), 0)\n\n # create\n analysis = factories.AnalysisFactory(experiments=experiments)\n\n experiments_ids = analysis.experiments.values_list(\n 'id', flat=True\n )\n expected_experiments_ids = models.Experiment.objects.values_list(\n 'id', flat=True\n )\n self.assertEqual(\n list(experiments_ids),\n list(expected_experiments_ids)\n )\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom .. import factories, models\nfrom . import CoreFixturesTestCase\n\n\n\n\n ... \n\n\n # create\n analysis = factories.AnalysisFactory(experiments=experiments)\n\n experiments_ids = analysis.experiments.values_list(\n 'id', flat=True\n )\n expected_experiments_ids = models.Experiment.objects.values_list(\n 'id', flat=True\n )\n self.assertEqual(\n list(experiments_ids),\n list(expected_experiments_ids)\n )\n\n\n ... 
"}}},{"rowIdx":2156,"cells":{"commit":{"kind":"string","value":"d21d090df0fe1d1daed089670b4df90c9aa9c126"},"old_file":{"kind":"string","value":"nsu-connect/src/main/java/ru/tulupov/nsuconnect/database/loader/ChatLoader.java"},"new_file":{"kind":"string","value":"nsu-connect/src/main/java/ru/tulupov/nsuconnect/database/loader/ChatLoader.java"},"old_contents":{"kind":"string","value":"package ru.tulupov.nsuconnect.database.loader;\n\n\nimport android.content.Context;\nimport android.support.v4.content.AsyncTaskLoader;\nimport android.util.Log;\n\nimport com.j256.ormlite.stmt.PreparedQuery;\nimport com.j256.ormlite.stmt.QueryBuilder;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\nimport ru.tulupov.nsuconnect.database.DatabaseContract;\nimport ru.tulupov.nsuconnect.database.HelperFactory;\nimport ru.tulupov.nsuconnect.model.Chat;\nimport ru.tulupov.nsuconnect.model.Message;\n\n\npublic class ChatLoader extends AsyncTaskLoader> {\n private static final String TAG = ChatLoader.class.getSimpleName();\n\n\n public ChatLoader(Context context) {\n super(context);\n }\n\n @Override\n public List loadInBackground() {\n try {\n QueryBuilder queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder();\n queryBuilder.orderBy(DatabaseContract.Chat.DATE, false);\n PreparedQuery preparedQuery = queryBuilder.prepare();\n List chats = HelperFactory.getHelper().getChatDao().query(preparedQuery);\n\n return chats;\n } catch (SQLException e) {\n Log.e(TAG, \"Error\", e);\n }\n return null;\n }\n\n\n}\n"},"new_contents":{"kind":"string","value":"package ru.tulupov.nsuconnect.database.loader;\n\n\nimport android.content.Context;\nimport android.support.v4.content.AsyncTaskLoader;\nimport android.util.Log;\n\nimport com.j256.ormlite.stmt.PreparedQuery;\nimport com.j256.ormlite.stmt.QueryBuilder;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\nimport ru.tulupov.nsuconnect.database.DatabaseContract;\nimport 
ru.tulupov.nsuconnect.database.HelperFactory;\nimport ru.tulupov.nsuconnect.model.Chat;\nimport ru.tulupov.nsuconnect.model.Message;\n\n\npublic class ChatLoader extends AsyncTaskLoader> {\n private static final String TAG = ChatLoader.class.getSimpleName();\n\n\n public ChatLoader(Context context) {\n super(context);\n }\n\n @Override\n public List loadInBackground() {\n try {\n QueryBuilder queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder();\n queryBuilder.orderBy(DatabaseContract.Chat.DATE, false);\n queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false);\n PreparedQuery preparedQuery = queryBuilder.prepare();\n List chats = HelperFactory.getHelper().getChatDao().query(preparedQuery);\n\n return chats;\n } catch (SQLException e) {\n Log.e(TAG, \"Error\", e);\n }\n return null;\n }\n\n\n}\n"},"subject":{"kind":"string","value":"Change sort order in chat list"},"message":{"kind":"string","value":"Change sort order in chat list\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"etulupov/nsu-connect-android,etulupov/nsu-connect-android"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage ru.tulupov.nsuconnect.database.loader;\n\n\nimport android.content.Context;\nimport android.support.v4.content.AsyncTaskLoader;\nimport android.util.Log;\n\nimport com.j256.ormlite.stmt.PreparedQuery;\nimport com.j256.ormlite.stmt.QueryBuilder;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\nimport ru.tulupov.nsuconnect.database.DatabaseContract;\nimport ru.tulupov.nsuconnect.database.HelperFactory;\nimport ru.tulupov.nsuconnect.model.Chat;\nimport ru.tulupov.nsuconnect.model.Message;\n\n\npublic class ChatLoader extends AsyncTaskLoader> {\n private static final String TAG = ChatLoader.class.getSimpleName();\n\n\n public ChatLoader(Context context) {\n super(context);\n }\n\n @Override\n public List loadInBackground() 
{\n try {\n QueryBuilder queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder();\n queryBuilder.orderBy(DatabaseContract.Chat.DATE, false);\n PreparedQuery preparedQuery = queryBuilder.prepare();\n List chats = HelperFactory.getHelper().getChatDao().query(preparedQuery);\n\n return chats;\n } catch (SQLException e) {\n Log.e(TAG, \"Error\", e);\n }\n return null;\n }\n\n\n}\n\n## Instruction:\nChange sort order in chat list\n\n## Code After:\npackage ru.tulupov.nsuconnect.database.loader;\n\n\nimport android.content.Context;\nimport android.support.v4.content.AsyncTaskLoader;\nimport android.util.Log;\n\nimport com.j256.ormlite.stmt.PreparedQuery;\nimport com.j256.ormlite.stmt.QueryBuilder;\n\nimport java.sql.SQLException;\nimport java.util.List;\n\nimport ru.tulupov.nsuconnect.database.DatabaseContract;\nimport ru.tulupov.nsuconnect.database.HelperFactory;\nimport ru.tulupov.nsuconnect.model.Chat;\nimport ru.tulupov.nsuconnect.model.Message;\n\n\npublic class ChatLoader extends AsyncTaskLoader> {\n private static final String TAG = ChatLoader.class.getSimpleName();\n\n\n public ChatLoader(Context context) {\n super(context);\n }\n\n @Override\n public List loadInBackground() {\n try {\n QueryBuilder queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder();\n queryBuilder.orderBy(DatabaseContract.Chat.DATE, false);\n queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false);\n PreparedQuery preparedQuery = queryBuilder.prepare();\n List chats = HelperFactory.getHelper().getChatDao().query(preparedQuery);\n\n return chats;\n } catch (SQLException e) {\n Log.e(TAG, \"Error\", e);\n }\n return null;\n }\n\n\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n try {\n QueryBuilder queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder();\n queryBuilder.orderBy(DatabaseContract.Chat.DATE, false);\n queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false);\n PreparedQuery preparedQuery = queryBuilder.prepare();\n List chats = HelperFactory.getHelper().getChatDao().query(preparedQuery);\n\n\n\n# ... rest of the code ..."}}},{"rowIdx":2157,"cells":{"commit":{"kind":"string","value":"922db591ca726acae07e2628119b95aa705f414c"},"old_file":{"kind":"string","value":"leetcode/ds_string_word_pattern.py"},"new_file":{"kind":"string","value":"leetcode/ds_string_word_pattern.py"},"old_contents":{"kind":"string","value":"\n'''\nGiven a pattern and a string str, find if str follows the same pattern.\nHere follow means a full match, such that there is a bijection between \na letter in pattern and a non-empty word in str.\nExamples:\npattern = \"abba\", str = \"dog cat cat dog\" should return true.\npattern = \"abba\", str = \"dog cat cat fish\" should return false.\npattern = \"aaaa\", str = \"dog cat cat dog\" should return false.\npattern = \"abba\", str = \"dog dog dog dog\" should return false.\nNotes:\nYou may assume pattern contains only lowercase letters, and str contains\nlowercase letters separated by a single space.\n'''\n\n"},"new_contents":{"kind":"string","value":"\n'''\nGiven a pattern and a string str, find if str follows the same pattern.\nHere follow means a full match, such that there is a bijection between \na letter in pattern and a non-empty word in str.\nExamples:\npattern = \"abba\", str = \"dog cat cat dog\" should return true.\npattern = \"abba\", str = \"dog cat cat fish\" should return false.\npattern = \"aaaa\", str = \"dog cat cat dog\" should return false.\npattern = \"abba\", str = \"dog dog dog dog\" should return false.\nNotes:\nYou may assume pattern contains only lowercase letters, and str contains\nlowercase letters separated by a single space.\n'''\n\n# Approach 1:\ndef 
wordPattern(self, pattern, str):\n clist = pattern #treat string as a list of chars\n wlist = str.split() #split string into a list of words\n # map(function, sequence): map applies the given function to every element in the sequence and returns a list\n # index - finds the index of the first occurence of every element in both list and string\n return map(clist.index, clist) == map(wlist.index, wlist)\n \n# Approach 2:\ndef wordPattern(self, pattern, str):\n clist = pattern \n wlist = str.split()\n # zip returns a tuple, cpupling the ith elements from both lists\n return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist)))\n \n# \"abba\", \"dog cat cat dog\", True.\n# \"abba\", \"dog cat cat fish\" False.\n# \"aaaa\", \"dog cat cat dog\" False.\n# \"abba\", \"dog dog dog dog\" False.\n"},"subject":{"kind":"string","value":"Add two approaches for string word pattern"},"message":{"kind":"string","value":"Add two approaches for string word pattern"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"ngovindaraj/Python"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\n'''\nGiven a pattern and a string str, find if str follows the same pattern.\nHere follow means a full match, such that there is a bijection between \na letter in pattern and a non-empty word in str.\nExamples:\npattern = \"abba\", str = \"dog cat cat dog\" should return true.\npattern = \"abba\", str = \"dog cat cat fish\" should return false.\npattern = \"aaaa\", str = \"dog cat cat dog\" should return false.\npattern = \"abba\", str = \"dog dog dog dog\" should return false.\nNotes:\nYou may assume pattern contains only lowercase letters, and str contains\nlowercase letters separated by a single space.\n'''\n\n\n## Instruction:\nAdd two approaches for string word pattern\n## Code After:\n\n'''\nGiven a pattern and a string str, find if str 
follows the same pattern.\nHere follow means a full match, such that there is a bijection between \na letter in pattern and a non-empty word in str.\nExamples:\npattern = \"abba\", str = \"dog cat cat dog\" should return true.\npattern = \"abba\", str = \"dog cat cat fish\" should return false.\npattern = \"aaaa\", str = \"dog cat cat dog\" should return false.\npattern = \"abba\", str = \"dog dog dog dog\" should return false.\nNotes:\nYou may assume pattern contains only lowercase letters, and str contains\nlowercase letters separated by a single space.\n'''\n\n# Approach 1:\ndef wordPattern(self, pattern, str):\n clist = pattern #treat string as a list of chars\n wlist = str.split() #split string into a list of words\n # map(function, sequence): map applies the given function to every element in the sequence and returns a list\n # index - finds the index of the first occurence of every element in both list and string\n return map(clist.index, clist) == map(wlist.index, wlist)\n \n# Approach 2:\ndef wordPattern(self, pattern, str):\n clist = pattern \n wlist = str.split()\n # zip returns a tuple, cpupling the ith elements from both lists\n return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist)))\n \n# \"abba\", \"dog cat cat dog\", True.\n# \"abba\", \"dog cat cat fish\" False.\n# \"aaaa\", \"dog cat cat dog\" False.\n# \"abba\", \"dog dog dog dog\" False.\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\nlowercase letters separated by a single space.\n'''\n\n# Approach 1:\ndef wordPattern(self, pattern, str):\n clist = pattern #treat string as a list of chars\n wlist = str.split() #split string into a list of words\n # map(function, sequence): map applies the given function to every element in the sequence and returns a list\n # index - finds the index of the first occurence of every element in both list and string\n return map(clist.index, clist) == map(wlist.index, wlist)\n \n# Approach 2:\ndef wordPattern(self, pattern, str):\n clist = pattern \n wlist = str.split()\n # zip returns a tuple, cpupling the ith elements from both lists\n return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist)))\n \n# \"abba\", \"dog cat cat dog\", True.\n# \"abba\", \"dog cat cat fish\" False.\n# \"aaaa\", \"dog cat cat dog\" False.\n# \"abba\", \"dog dog dog dog\" False.\n\n\n// ... rest of the code ..."}}},{"rowIdx":2158,"cells":{"commit":{"kind":"string","value":"de441445dbdade4d937783626f1beeb9f439ee11"},"old_file":{"kind":"string","value":"helpers.py"},"new_file":{"kind":"string","value":"helpers.py"},"old_contents":{"kind":"string","value":"import feedparser\nimport datetime\n\nfrom .models import RssEntry\n\n\nclass RssSyncHelper(object):\n\n def __init__(self, feed):\n self.feed = feed\n\n def save_entry(self, result):\n pub_date = result.updated_parsed\n published = datetime.date(pub_date[0], pub_date[1], pub_date[2])\n return RssEntry.objects.get_or_create(\n title=result.title,\n feed=self.feed,\n summary=result.content[0]['value'],\n link=result.link,\n date=published,\n )\n\n def sync(self):\n feed = feedparser.parse(self.feed.url)\n for entry in feed.entries:\n self.save_entry(entry)\n\n def sync_wordpress_paginated(self, page):\n \"\"\"Sync a Wordpress paginated feed\"\"\"\n feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))\n for entry in feed.entries:\n 
self.save_entry(entry)\n"},"new_contents":{"kind":"string","value":"import feedparser\nimport datetime\n\nfrom .models import RssEntry\n\n\ndef add_custom_acceptable_elements(elements):\n \"\"\"\n Add custom acceptable elements so iframes and other potential video\n elements will get synched.\n \"\"\"\n elements += list(feedparser._HTMLSanitizer.acceptable_elements)\n feedparser._HTMLSanitizer.acceptable_elements = set(elements)\n\ncustom_acceptable_elements = ['iframe', 'embed', 'object',]\nadd_custom_acceptable_elements(custom_acceptable_elements)\n\n\nclass RssSyncHelper(object):\n\n def __init__(self, feed):\n self.feed = feed\n\n def save_entry(self, result):\n pub_date = result.updated_parsed\n published = datetime.date(pub_date[0], pub_date[1], pub_date[2])\n return RssEntry.objects.get_or_create(\n title=result.title,\n feed=self.feed,\n summary=result.content[0]['value'],\n link=result.link,\n date=published,\n )\n\n def sync(self):\n feed = feedparser.parse(self.feed.url)\n for entry in feed.entries:\n self.save_entry(entry)\n\n def sync_wordpress_paginated(self, page):\n \"\"\"Sync a Wordpress paginated feed\"\"\"\n feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))\n for entry in feed.entries:\n self.save_entry(entry)\n"},"subject":{"kind":"string","value":"Allow iframes to be synched"},"message":{"kind":"string","value":"Allow iframes to be synched\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"ebrelsford/django-rsssync"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport feedparser\nimport datetime\n\nfrom .models import RssEntry\n\n\nclass RssSyncHelper(object):\n\n def __init__(self, feed):\n self.feed = feed\n\n def save_entry(self, result):\n pub_date = result.updated_parsed\n published = datetime.date(pub_date[0], pub_date[1], pub_date[2])\n return RssEntry.objects.get_or_create(\n 
title=result.title,\n feed=self.feed,\n summary=result.content[0]['value'],\n link=result.link,\n date=published,\n )\n\n def sync(self):\n feed = feedparser.parse(self.feed.url)\n for entry in feed.entries:\n self.save_entry(entry)\n\n def sync_wordpress_paginated(self, page):\n \"\"\"Sync a Wordpress paginated feed\"\"\"\n feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))\n for entry in feed.entries:\n self.save_entry(entry)\n\n## Instruction:\nAllow iframes to be synched\n\n## Code After:\nimport feedparser\nimport datetime\n\nfrom .models import RssEntry\n\n\ndef add_custom_acceptable_elements(elements):\n \"\"\"\n Add custom acceptable elements so iframes and other potential video\n elements will get synched.\n \"\"\"\n elements += list(feedparser._HTMLSanitizer.acceptable_elements)\n feedparser._HTMLSanitizer.acceptable_elements = set(elements)\n\ncustom_acceptable_elements = ['iframe', 'embed', 'object',]\nadd_custom_acceptable_elements(custom_acceptable_elements)\n\n\nclass RssSyncHelper(object):\n\n def __init__(self, feed):\n self.feed = feed\n\n def save_entry(self, result):\n pub_date = result.updated_parsed\n published = datetime.date(pub_date[0], pub_date[1], pub_date[2])\n return RssEntry.objects.get_or_create(\n title=result.title,\n feed=self.feed,\n summary=result.content[0]['value'],\n link=result.link,\n date=published,\n )\n\n def sync(self):\n feed = feedparser.parse(self.feed.url)\n for entry in feed.entries:\n self.save_entry(entry)\n\n def sync_wordpress_paginated(self, page):\n \"\"\"Sync a Wordpress paginated feed\"\"\"\n feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))\n for entry in feed.entries:\n self.save_entry(entry)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\nimport datetime\n\nfrom .models import RssEntry\n\n\ndef add_custom_acceptable_elements(elements):\n \"\"\"\n Add custom acceptable elements so iframes and other potential video\n elements will get synched.\n \"\"\"\n elements += list(feedparser._HTMLSanitizer.acceptable_elements)\n feedparser._HTMLSanitizer.acceptable_elements = set(elements)\n\ncustom_acceptable_elements = ['iframe', 'embed', 'object',]\nadd_custom_acceptable_elements(custom_acceptable_elements)\n\n\nclass RssSyncHelper(object):\n\n\n// ... rest of the code ..."}}},{"rowIdx":2159,"cells":{"commit":{"kind":"string","value":"51b1f612ab8058da89cc8aaa6b1db99139c7eda0"},"old_file":{"kind":"string","value":"versions/settings.py"},"new_file":{"kind":"string","value":"versions/settings.py"},"old_contents":{"kind":"string","value":"from django.conf import settings\nfrom django.utils import importlib\n\n\ndef import_from_string(val, setting_name):\n \"\"\"\n Attempt to import a class from a string representation.\n Based on the method of the same name in Django Rest Framework.\n \"\"\"\n try:\n parts = val.split('.')\n module_path, class_name = '.'.join(parts[:-1]), parts[-1]\n module = importlib.import_module(module_path)\n return getattr(module, class_name)\n except ImportError as e:\n raise ImportError(\"Could not import '{}' for CleanerVersion setting '{}'. 
{}: {}.\".format(\n (val, setting_name, e.__class__.__name__, e)))\n\n_cache = {}\ndef get_versioned_delete_collector_class():\n \"\"\"\n Gets the class to use for deletion collection.\n\n This is done as a method instead of just defining a module-level variable because\n Django doesn't like attributes of the django.conf.settings object to be accessed\n in top-level module scope.\n\n :return: class\n \"\"\"\n key = 'VERSIONED_DELETE_COLLECTOR'\n try:\n cls = _cache[key]\n except KeyError:\n cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)\n _cache[key] = cls\n return cls\n"},"new_contents":{"kind":"string","value":"from django.conf import settings\nimport importlib\n\n\ndef import_from_string(val, setting_name):\n \"\"\"\n Attempt to import a class from a string representation.\n Based on the method of the same name in Django Rest Framework.\n \"\"\"\n try:\n parts = val.split('.')\n module_path, class_name = '.'.join(parts[:-1]), parts[-1]\n module = importlib.import_module(module_path)\n return getattr(module, class_name)\n except ImportError as e:\n raise ImportError(\"Could not import '{}' for CleanerVersion setting '{}'. 
{}: {}.\".format(\n (val, setting_name, e.__class__.__name__, e)))\n\n_cache = {}\ndef get_versioned_delete_collector_class():\n \"\"\"\n Gets the class to use for deletion collection.\n\n This is done as a method instead of just defining a module-level variable because\n Django doesn't like attributes of the django.conf.settings object to be accessed\n in top-level module scope.\n\n :return: class\n \"\"\"\n key = 'VERSIONED_DELETE_COLLECTOR'\n try:\n cls = _cache[key]\n except KeyError:\n cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)\n _cache[key] = cls\n return cls\n"},"subject":{"kind":"string","value":"Use python 2.7+ standard importlib instead of deprecated django importlib"},"message":{"kind":"string","value":"Use python 2.7+ standard importlib instead of deprecated django importlib\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"swisscom/cleanerversion,anfema/cleanerversion,anfema/cleanerversion,pretix/cleanerversion,pretix/cleanerversion,swisscom/cleanerversion,swisscom/cleanerversion,pretix/cleanerversion,anfema/cleanerversion"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.conf import settings\nfrom django.utils import importlib\n\n\ndef import_from_string(val, setting_name):\n \"\"\"\n Attempt to import a class from a string representation.\n Based on the method of the same name in Django Rest Framework.\n \"\"\"\n try:\n parts = val.split('.')\n module_path, class_name = '.'.join(parts[:-1]), parts[-1]\n module = importlib.import_module(module_path)\n return getattr(module, class_name)\n except ImportError as e:\n raise ImportError(\"Could not import '{}' for CleanerVersion setting '{}'. 
{}: {}.\".format(\n (val, setting_name, e.__class__.__name__, e)))\n\n_cache = {}\ndef get_versioned_delete_collector_class():\n \"\"\"\n Gets the class to use for deletion collection.\n\n This is done as a method instead of just defining a module-level variable because\n Django doesn't like attributes of the django.conf.settings object to be accessed\n in top-level module scope.\n\n :return: class\n \"\"\"\n key = 'VERSIONED_DELETE_COLLECTOR'\n try:\n cls = _cache[key]\n except KeyError:\n cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)\n _cache[key] = cls\n return cls\n\n## Instruction:\nUse python 2.7+ standard importlib instead of deprecated django importlib\n\n## Code After:\nfrom django.conf import settings\nimport importlib\n\n\ndef import_from_string(val, setting_name):\n \"\"\"\n Attempt to import a class from a string representation.\n Based on the method of the same name in Django Rest Framework.\n \"\"\"\n try:\n parts = val.split('.')\n module_path, class_name = '.'.join(parts[:-1]), parts[-1]\n module = importlib.import_module(module_path)\n return getattr(module, class_name)\n except ImportError as e:\n raise ImportError(\"Could not import '{}' for CleanerVersion setting '{}'. {}: {}.\".format(\n (val, setting_name, e.__class__.__name__, e)))\n\n_cache = {}\ndef get_versioned_delete_collector_class():\n \"\"\"\n Gets the class to use for deletion collection.\n\n This is done as a method instead of just defining a module-level variable because\n Django doesn't like attributes of the django.conf.settings object to be accessed\n in top-level module scope.\n\n :return: class\n \"\"\"\n key = 'VERSIONED_DELETE_COLLECTOR'\n try:\n cls = _cache[key]\n except KeyError:\n cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key)\n _cache[key] = cls\n return cls\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\nfrom django.conf import settings\nimport importlib\n\n\ndef import_from_string(val, setting_name):\n\n\n// ... rest of the code ..."}}},{"rowIdx":2160,"cells":{"commit":{"kind":"string","value":"a89b6ec1bda46c63c0ff0e0a8bb44eb3eda41c1b"},"old_file":{"kind":"string","value":"repo_health/gh_issues/serializers/GhIssueStatsSerializer.py"},"new_file":{"kind":"string","value":"repo_health/gh_issues/serializers/GhIssueStatsSerializer.py"},"old_contents":{"kind":"string","value":"\nfrom rest_framework import serializers as s\nfrom ..models import GhIssueEvent\nfrom repo_health.index.mixins import CountForPastYearMixin\n\n\nclass GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):\n\n _label_names = None\n\n issues_count = s.SerializerMethodField()\n issues_closed_last_year = s.SerializerMethodField()\n issues_opened_last_year = s.SerializerMethodField()\n merged_count = s.SerializerMethodField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n repo = args[0]\n self._label_names = repo.labels.values_list('name', flat=True)\n\n def get_issues_count(self, repo):\n return repo.issues_count\n\n def get_issues_closed_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())\n\n def get_issues_opened_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues)\n"},"new_contents":{"kind":"string","value":"\nfrom rest_framework import serializers as s\nfrom ..models import GhIssueEvent\nfrom repo_health.index.mixins import CountForPastYearMixin\n\n\nclass GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):\n\n _label_names = None\n\n issues_count = s.SerializerMethodField()\n issues_closed_last_year = s.SerializerMethodField()\n issues_opened_last_year = s.SerializerMethodField()\n merged_count = s.SerializerMethodField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n repo = args[0]\n self._label_names = 
repo.labels.values_list('name', flat=True)\n\n def get_issues_count(self, repo):\n return repo.issues_count\n\n def get_issues_closed_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())\n\n def get_issues_opened_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues)\n\n def get_merged_count(self, repo):\n return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()\n"},"subject":{"kind":"string","value":"Add get merged count method."},"message":{"kind":"string","value":"Add get merged count method.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom rest_framework import serializers as s\nfrom ..models import GhIssueEvent\nfrom repo_health.index.mixins import CountForPastYearMixin\n\n\nclass GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):\n\n _label_names = None\n\n issues_count = s.SerializerMethodField()\n issues_closed_last_year = s.SerializerMethodField()\n issues_opened_last_year = s.SerializerMethodField()\n merged_count = s.SerializerMethodField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n repo = args[0]\n self._label_names = repo.labels.values_list('name', flat=True)\n\n def get_issues_count(self, repo):\n return repo.issues_count\n\n def get_issues_closed_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())\n\n def get_issues_opened_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues)\n\n## Instruction:\nAdd get merged count method.\n\n## Code After:\n\nfrom rest_framework import serializers as s\nfrom ..models import 
GhIssueEvent\nfrom repo_health.index.mixins import CountForPastYearMixin\n\n\nclass GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin):\n\n _label_names = None\n\n issues_count = s.SerializerMethodField()\n issues_closed_last_year = s.SerializerMethodField()\n issues_opened_last_year = s.SerializerMethodField()\n merged_count = s.SerializerMethodField()\n\n def __init__(self, *args, **kwargs):\n super().__init__(*args, **kwargs)\n repo = args[0]\n self._label_names = repo.labels.values_list('name', flat=True)\n\n def get_issues_count(self, repo):\n return repo.issues_count\n\n def get_issues_closed_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct())\n\n def get_issues_opened_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues)\n\n def get_merged_count(self, repo):\n return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n def get_issues_opened_last_year(self, repo):\n return self.get_count_list_for_year(repo.issues)\n\n def get_merged_count(self, repo):\n return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2161,"cells":{"commit":{"kind":"string","value":"b0ffd67ae29d88641bb8d90b577eafe847aa5eff"},"old_file":{"kind":"string","value":"OTCAnalyser/src/uk/ac/cam/cstibhotel/otcanalyser/gui/GUI.java"},"new_file":{"kind":"string","value":"OTCAnalyser/src/uk/ac/cam/cstibhotel/otcanalyser/gui/GUI.java"},"old_contents":{"kind":"string","value":"package uk.ac.cam.cstibhotel.otcanalyser.gui;\n\nimport java.awt.BorderLayout;\n\nimport javax.swing.JFrame;\n\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener;\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult;\n\npublic class GUI extends JFrame implements SearchListener {\n\n\tprivate static final long serialVersionUID = 1L;\n\t\n\tpublic StatusBar statusBar;\n\t\n\tpublic static GUI gui;\n\tstatic SearchWindow searchWindow;\n\tDataViewer dataViewer;\n\t\n\tpublic static GUI getInstance() {\n\t\tif (gui==null) {\n\t\t\tgui = new GUI();\n\t\t}\n\t\treturn gui;\n\t}\n\t\n\tpublic GUI() {\n\t\tsetTitle(\"OTC Analyser\");\n\t\tsetSize(1000,600);\n\t\tsetDefaultCloseOperation(EXIT_ON_CLOSE);\n\t\tsearchWindow = SearchWindow.getInstance();\n\t\tadd(searchWindow,BorderLayout.WEST);\n\t\tsearchWindow.setVisible(true);\n\t\tstatusBar = StatusBar.getInstance();\n\t\tadd(statusBar,BorderLayout.SOUTH);\n\t\tstatusBar.setVisible(true);\n\t\tdataViewer = DataViewer.dataViewer;\n\t\tthis.add(dataViewer);\n\t\tdataViewer.setVisible(true);\n\t\tthis.setVisible(true);\n\t}\n\t\n\t@Override\n\tpublic void getSearchResult(SearchResult s) {\n\t\tDataViewer.addTrades(s.getResultData());\n\t}\n\n}\n"},"new_contents":{"kind":"string","value":"package uk.ac.cam.cstibhotel.otcanalyser.gui;\n\nimport java.awt.BorderLayout;\n\nimport javax.swing.JFrame;\n\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener;\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult;\n\npublic class GUI extends JFrame implements SearchListener {\n\n\tprivate 
static final long serialVersionUID = 1L;\n\t\n\tpublic StatusBar statusBar;\n\t\n\tpublic static GUI gui;\n\tstatic SearchWindow searchWindow;\n\tDataViewer dataViewer;\n\t\n\tpublic static GUI getInstance() {\n\t\tif (gui==null) {\n\t\t\tgui = new GUI();\n\t\t}\n\t\treturn gui;\n\t}\n\t\n\tpublic GUI() {\n\t\tsetTitle(\"OTC Analyser\");\n\t\tsetSize(1000,600);\n\t\tsetDefaultCloseOperation(EXIT_ON_CLOSE);\n\t\tsearchWindow = SearchWindow.getInstance();\n\t\tadd(searchWindow,BorderLayout.WEST);\n\t\tsearchWindow.setVisible(true);\n\t\tstatusBar = StatusBar.getInstance();\n\t\tadd(statusBar,BorderLayout.SOUTH);\n\t\tstatusBar.setVisible(true);\n\t\tdataViewer = DataViewer.dataViewer;\n\t\tthis.add(dataViewer);\n\t\tdataViewer.setVisible(true);\n\t\tthis.setVisible(true);\n\t}\n\t\n\t@Override\n\tpublic void getSearchResult(SearchResult s) {\n\t\tDataViewer.clearTrades();\n\t\tDataViewer.addTrades(s.getResultData());\n\t}\n\n}\n"},"subject":{"kind":"string","value":"Clear trades from old search from data viewer before adding new ones"},"message":{"kind":"string","value":"Clear trades from old search from data viewer before adding new ones\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"CSTIB-Hotel/OTCAnalyser"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage uk.ac.cam.cstibhotel.otcanalyser.gui;\n\nimport java.awt.BorderLayout;\n\nimport javax.swing.JFrame;\n\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener;\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult;\n\npublic class GUI extends JFrame implements SearchListener {\n\n\tprivate static final long serialVersionUID = 1L;\n\t\n\tpublic StatusBar statusBar;\n\t\n\tpublic static GUI gui;\n\tstatic SearchWindow searchWindow;\n\tDataViewer dataViewer;\n\t\n\tpublic static GUI getInstance() {\n\t\tif (gui==null) {\n\t\t\tgui = new 
GUI();\n\t\t}\n\t\treturn gui;\n\t}\n\t\n\tpublic GUI() {\n\t\tsetTitle(\"OTC Analyser\");\n\t\tsetSize(1000,600);\n\t\tsetDefaultCloseOperation(EXIT_ON_CLOSE);\n\t\tsearchWindow = SearchWindow.getInstance();\n\t\tadd(searchWindow,BorderLayout.WEST);\n\t\tsearchWindow.setVisible(true);\n\t\tstatusBar = StatusBar.getInstance();\n\t\tadd(statusBar,BorderLayout.SOUTH);\n\t\tstatusBar.setVisible(true);\n\t\tdataViewer = DataViewer.dataViewer;\n\t\tthis.add(dataViewer);\n\t\tdataViewer.setVisible(true);\n\t\tthis.setVisible(true);\n\t}\n\t\n\t@Override\n\tpublic void getSearchResult(SearchResult s) {\n\t\tDataViewer.addTrades(s.getResultData());\n\t}\n\n}\n\n## Instruction:\nClear trades from old search from data viewer before adding new ones\n\n## Code After:\npackage uk.ac.cam.cstibhotel.otcanalyser.gui;\n\nimport java.awt.BorderLayout;\n\nimport javax.swing.JFrame;\n\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener;\nimport uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult;\n\npublic class GUI extends JFrame implements SearchListener {\n\n\tprivate static final long serialVersionUID = 1L;\n\t\n\tpublic StatusBar statusBar;\n\t\n\tpublic static GUI gui;\n\tstatic SearchWindow searchWindow;\n\tDataViewer dataViewer;\n\t\n\tpublic static GUI getInstance() {\n\t\tif (gui==null) {\n\t\t\tgui = new GUI();\n\t\t}\n\t\treturn gui;\n\t}\n\t\n\tpublic GUI() {\n\t\tsetTitle(\"OTC Analyser\");\n\t\tsetSize(1000,600);\n\t\tsetDefaultCloseOperation(EXIT_ON_CLOSE);\n\t\tsearchWindow = SearchWindow.getInstance();\n\t\tadd(searchWindow,BorderLayout.WEST);\n\t\tsearchWindow.setVisible(true);\n\t\tstatusBar = StatusBar.getInstance();\n\t\tadd(statusBar,BorderLayout.SOUTH);\n\t\tstatusBar.setVisible(true);\n\t\tdataViewer = DataViewer.dataViewer;\n\t\tthis.add(dataViewer);\n\t\tdataViewer.setVisible(true);\n\t\tthis.setVisible(true);\n\t}\n\t\n\t@Override\n\tpublic void getSearchResult(SearchResult s) 
{\n\t\tDataViewer.clearTrades();\n\t\tDataViewer.addTrades(s.getResultData());\n\t}\n\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\t\n\t@Override\n\tpublic void getSearchResult(SearchResult s) {\n\t\tDataViewer.clearTrades();\n\t\tDataViewer.addTrades(s.getResultData());\n\t}\n\n\n\n# ... rest of the code ..."}}},{"rowIdx":2162,"cells":{"commit":{"kind":"string","value":"c32e87894d4baf404d5b300459fc68a6d9d973c8"},"old_file":{"kind":"string","value":"zun/db/__init__.py"},"new_file":{"kind":"string","value":"zun/db/__init__.py"},"old_contents":{"kind":"string","value":"\nfrom oslo_db import options\n\nfrom zun.common import paths\nimport zun.conf\n\n_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')\n\noptions.set_defaults(zun.conf.CONF)\noptions.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')\n"},"new_contents":{"kind":"string","value":"\nfrom oslo_db import options\n\nfrom zun.common import paths\nimport zun.conf\n\n_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')\n\noptions.set_defaults(zun.conf.CONF)\noptions.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)\n"},"subject":{"kind":"string","value":"Remove the duplicated config sqlite_db"},"message":{"kind":"string","value":"Remove the duplicated config sqlite_db\n\nThe config sqlite_db has been removed from oslo.db. 
See here:\nhttps://review.openstack.org/#/c/449437/\n\nChange-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom oslo_db import options\n\nfrom zun.common import paths\nimport zun.conf\n\n_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')\n\noptions.set_defaults(zun.conf.CONF)\noptions.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')\n\n## Instruction:\nRemove the duplicated config sqlite_db\n\nThe config sqlite_db has been removed from oslo.db. See here:\nhttps://review.openstack.org/#/c/449437/\n\nChange-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff\n\n## Code After:\n\nfrom oslo_db import options\n\nfrom zun.common import paths\nimport zun.conf\n\n_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')\n\noptions.set_defaults(zun.conf.CONF)\noptions.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n_DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite')\n\noptions.set_defaults(zun.conf.CONF)\noptions.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)\n\n\n ... 
"}}},{"rowIdx":2163,"cells":{"commit":{"kind":"string","value":"162e975f5d1e7d435f1fb77356751e94cc81da6d"},"old_file":{"kind":"string","value":"inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/tab/RememberingTabbedPanel.java"},"new_file":{"kind":"string","value":"inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/tab/RememberingTabbedPanel.java"},"old_contents":{"kind":"string","value":"package lt.inventi.wicket.component.bootstrap.tab;\n\nimport java.util.List;\n\nimport org.apache.wicket.extensions.markup.html.tabs.ITab;\nimport org.apache.wicket.markup.html.WebMarkupContainer;\nimport org.apache.wicket.markup.html.link.Link;\nimport org.apache.wicket.model.IModel;\nimport org.apache.wicket.util.string.StringValue;\n\nimport de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel;\n\npublic class RememberingTabbedPanel extends BootstrapTabbedPanel {\n\n public RememberingTabbedPanel(String id, List tabs, IModel model) {\n super(id, tabs, model);\n }\n\n public RememberingTabbedPanel(String id, List tabs) {\n super(id, tabs);\n }\n\n @Override\n protected void onInitialize() {\n super.onInitialize();\n\n StringValue selectedTabId = getPage().getPageParameters().get(getId());\n if (selectedTabId.isEmpty()) {\n setSelectedTab(0);\n } else {\n setSelectedTab(selectedTabId.toInt());\n }\n }\n\n @Override\n protected WebMarkupContainer newLink(String linkId, final int index) {\n return new Link(linkId) {\n @Override\n public void onClick() {\n setSelectedTab(index);\n getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index));\n }\n };\n }\n}\n"},"new_contents":{"kind":"string","value":"package lt.inventi.wicket.component.bootstrap.tab;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.apache.wicket.extensions.markup.html.tabs.ITab;\nimport org.apache.wicket.markup.html.WebMarkupContainer;\nimport org.apache.wicket.markup.html.link.Link;\nimport 
org.apache.wicket.model.IModel;\nimport org.apache.wicket.util.string.StringValue;\n\nimport de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel;\n\npublic class RememberingTabbedPanel extends BootstrapTabbedPanel {\n\n public RememberingTabbedPanel(String id, List tabs, IModel model) {\n super(id, tabs, model);\n }\n\n public RememberingTabbedPanel(String id, List tabs) {\n super(id, tabs);\n }\n\n public RememberingTabbedPanel(String id, T... tabs) {\n super(id, Arrays.asList(tabs));\n }\n\n @Override\n protected void onInitialize() {\n super.onInitialize();\n\n StringValue selectedTabId = getPage().getPageParameters().get(getId());\n if (selectedTabId.isEmpty()) {\n setSelectedTab(0);\n } else {\n setSelectedTab(selectedTabId.toInt());\n }\n }\n\n @Override\n protected WebMarkupContainer newLink(String linkId, final int index) {\n return new Link(linkId) {\n @Override\n public void onClick() {\n setSelectedTab(index);\n getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index));\n }\n };\n }\n}\n"},"subject":{"kind":"string","value":"Add a vararg constructor for the Tabbed Panel"},"message":{"kind":"string","value":"Add a vararg constructor for the Tabbed Panel\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"inventiLT/inventi-wicket,inventiLT/inventi-wicket,inventiLT/inventi-wicket"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage lt.inventi.wicket.component.bootstrap.tab;\n\nimport java.util.List;\n\nimport org.apache.wicket.extensions.markup.html.tabs.ITab;\nimport org.apache.wicket.markup.html.WebMarkupContainer;\nimport org.apache.wicket.markup.html.link.Link;\nimport org.apache.wicket.model.IModel;\nimport org.apache.wicket.util.string.StringValue;\n\nimport de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel;\n\npublic class RememberingTabbedPanel extends 
BootstrapTabbedPanel {\n\n public RememberingTabbedPanel(String id, List tabs, IModel model) {\n super(id, tabs, model);\n }\n\n public RememberingTabbedPanel(String id, List tabs) {\n super(id, tabs);\n }\n\n @Override\n protected void onInitialize() {\n super.onInitialize();\n\n StringValue selectedTabId = getPage().getPageParameters().get(getId());\n if (selectedTabId.isEmpty()) {\n setSelectedTab(0);\n } else {\n setSelectedTab(selectedTabId.toInt());\n }\n }\n\n @Override\n protected WebMarkupContainer newLink(String linkId, final int index) {\n return new Link(linkId) {\n @Override\n public void onClick() {\n setSelectedTab(index);\n getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index));\n }\n };\n }\n}\n\n## Instruction:\nAdd a vararg constructor for the Tabbed Panel\n\n## Code After:\npackage lt.inventi.wicket.component.bootstrap.tab;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.apache.wicket.extensions.markup.html.tabs.ITab;\nimport org.apache.wicket.markup.html.WebMarkupContainer;\nimport org.apache.wicket.markup.html.link.Link;\nimport org.apache.wicket.model.IModel;\nimport org.apache.wicket.util.string.StringValue;\n\nimport de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel;\n\npublic class RememberingTabbedPanel extends BootstrapTabbedPanel {\n\n public RememberingTabbedPanel(String id, List tabs, IModel model) {\n super(id, tabs, model);\n }\n\n public RememberingTabbedPanel(String id, List tabs) {\n super(id, tabs);\n }\n\n public RememberingTabbedPanel(String id, T... 
tabs) {\n super(id, Arrays.asList(tabs));\n }\n\n @Override\n protected void onInitialize() {\n super.onInitialize();\n\n StringValue selectedTabId = getPage().getPageParameters().get(getId());\n if (selectedTabId.isEmpty()) {\n setSelectedTab(0);\n } else {\n setSelectedTab(selectedTabId.toInt());\n }\n }\n\n @Override\n protected WebMarkupContainer newLink(String linkId, final int index) {\n return new Link(linkId) {\n @Override\n public void onClick() {\n setSelectedTab(index);\n getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index));\n }\n };\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\npackage lt.inventi.wicket.component.bootstrap.tab;\n\nimport java.util.Arrays;\nimport java.util.List;\n\nimport org.apache.wicket.extensions.markup.html.tabs.ITab;\n\n\n// ... modified code ... \n\n\n\n public RememberingTabbedPanel(String id, List tabs) {\n super(id, tabs);\n }\n\n public RememberingTabbedPanel(String id, T... tabs) {\n super(id, Arrays.asList(tabs));\n }\n\n @Override\n\n\n// ... rest of the code ..."}}},{"rowIdx":2164,"cells":{"commit":{"kind":"string","value":"3a28eac6c49bdf2acfc5bde4ed7f6e317f39f3ab"},"old_file":{"kind":"string","value":"ui-tests/src/test/java/io/syndesis/qe/steps/other/DropBoxSteps.java"},"new_file":{"kind":"string","value":"ui-tests/src/test/java/io/syndesis/qe/steps/other/DropBoxSteps.java"},"old_contents":{"kind":"string","value":"package io.syndesis.qe.steps.other;\n\nimport com.dropbox.core.DbxException;\nimport cucumber.api.java.en.When;\nimport io.syndesis.qe.utils.DropBoxUtils;\nimport org.assertj.core.api.Assertions;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.Lazy;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeoutException;\n\npublic class DropBoxSteps {\n @Lazy\n @Autowired\n private DropBoxUtils dropBoxUtils;\n\n @When(\"^.*uploads? 
file with path \\\"([^\\\"]*)\\\" and content \\\"([^\\\"]*)\\\" on Dropbox$\")\n public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException {\n dropBoxUtils.uploadFile(filePath, content);\n }\n\n\n @When(\"^.*checks? that file with path \\\"([^\\\"]*)\\\" exists? on Dropbox$\")\n public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException {\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isTrue();\n }\n\n @When(\"^.*deletes? file with path \\\"([^\\\"]*)\\\" from Dropbox$\")\n public void deleteFile(String filePath) throws DbxException {\n dropBoxUtils.deleteFile(filePath);\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse();\n }\n}\n"},"new_contents":{"kind":"string","value":"package io.syndesis.qe.steps.other;\n\nimport com.dropbox.core.DbxException;\nimport cucumber.api.java.en.When;\nimport io.syndesis.qe.utils.DropBoxUtils;\nimport io.syndesis.qe.utils.TestUtils;\nimport org.assertj.core.api.Assertions;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.Lazy;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\npublic class DropBoxSteps {\n @Lazy\n @Autowired\n private DropBoxUtils dropBoxUtils;\n\n @When(\"^.*uploads? file with path \\\"([^\\\"]*)\\\" and content \\\"([^\\\"]*)\\\" on Dropbox$\")\n public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException {\n dropBoxUtils.uploadFile(filePath, content);\n }\n\n\n @When(\"^.*checks? that file with path \\\"([^\\\"]*)\\\" exists? 
on Dropbox$\")\n public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException {\n boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15);\n Assertions.assertThat(fileExists).isTrue();\n }\n\n @When(\"^.*deletes? file with path \\\"([^\\\"]*)\\\" from Dropbox$\")\n public void deleteFile(String filePath) throws DbxException {\n dropBoxUtils.deleteFile(filePath);\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse();\n }\n}\n"},"subject":{"kind":"string","value":"Add wait for file to appear in Dropbox"},"message":{"kind":"string","value":"fix(ui): Add wait for file to appear in Dropbox\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"mcada/syndesis-qe,mcada/syndesis-qe,mcada/syndesis-qe"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage io.syndesis.qe.steps.other;\n\nimport com.dropbox.core.DbxException;\nimport cucumber.api.java.en.When;\nimport io.syndesis.qe.utils.DropBoxUtils;\nimport org.assertj.core.api.Assertions;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.Lazy;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeoutException;\n\npublic class DropBoxSteps {\n @Lazy\n @Autowired\n private DropBoxUtils dropBoxUtils;\n\n @When(\"^.*uploads? file with path \\\"([^\\\"]*)\\\" and content \\\"([^\\\"]*)\\\" on Dropbox$\")\n public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException {\n dropBoxUtils.uploadFile(filePath, content);\n }\n\n\n @When(\"^.*checks? that file with path \\\"([^\\\"]*)\\\" exists? 
on Dropbox$\")\n public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException {\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isTrue();\n }\n\n @When(\"^.*deletes? file with path \\\"([^\\\"]*)\\\" from Dropbox$\")\n public void deleteFile(String filePath) throws DbxException {\n dropBoxUtils.deleteFile(filePath);\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse();\n }\n}\n\n## Instruction:\nfix(ui): Add wait for file to appear in Dropbox\n\n## Code After:\npackage io.syndesis.qe.steps.other;\n\nimport com.dropbox.core.DbxException;\nimport cucumber.api.java.en.When;\nimport io.syndesis.qe.utils.DropBoxUtils;\nimport io.syndesis.qe.utils.TestUtils;\nimport org.assertj.core.api.Assertions;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.Lazy;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\npublic class DropBoxSteps {\n @Lazy\n @Autowired\n private DropBoxUtils dropBoxUtils;\n\n @When(\"^.*uploads? file with path \\\"([^\\\"]*)\\\" and content \\\"([^\\\"]*)\\\" on Dropbox$\")\n public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException {\n dropBoxUtils.uploadFile(filePath, content);\n }\n\n\n @When(\"^.*checks? that file with path \\\"([^\\\"]*)\\\" exists? on Dropbox$\")\n public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException {\n boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15);\n Assertions.assertThat(fileExists).isTrue();\n }\n\n @When(\"^.*deletes? 
file with path \\\"([^\\\"]*)\\\" from Dropbox$\")\n public void deleteFile(String filePath) throws DbxException {\n dropBoxUtils.deleteFile(filePath);\n Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse();\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nimport com.dropbox.core.DbxException;\nimport cucumber.api.java.en.When;\nimport io.syndesis.qe.utils.DropBoxUtils;\nimport io.syndesis.qe.utils.TestUtils;\nimport org.assertj.core.api.Assertions;\nimport org.springframework.beans.factory.annotation.Autowired;\nimport org.springframework.context.annotation.Lazy;\n\nimport java.io.IOException;\nimport java.util.concurrent.TimeUnit;\nimport java.util.concurrent.TimeoutException;\n\npublic class DropBoxSteps {\n\n\n# ... modified code ... \n\n\n\n @When(\"^.*checks? that file with path \\\"([^\\\"]*)\\\" exists? on Dropbox$\")\n public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException {\n boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15);\n Assertions.assertThat(fileExists).isTrue();\n }\n\n @When(\"^.*deletes? file with path \\\"([^\\\"]*)\\\" from Dropbox$\")\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2165,"cells":{"commit":{"kind":"string","value":"8e7eb5dec20ee75d34b566341af3c22b57503dcb"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"from setuptools import setup\n\nsetup(\n name=\"setquery\",\n version=\"0.1\",\n description=\"Set arithmetic evaluator\",\n author=\"Paul Scott\",\n author_email=\"paul@duedil.com\",\n url=\"https://github.com/icio/setquery\",\n download_url=\"https://github.com/icio/setquery/tarball/0.1\",\n setup_requires=[\"nose\", \"rednose\"],\n py_modules=[\"setquery\"],\n license=\"MIT\",\n keywords=['set', 'expression', 'eval', 'evaluate'],\n classifiers=[],\n)\n"},"new_contents":{"kind":"string","value":"from setuptools import setup\n\nsetup(\n name=\"setquery\",\n version=\"0.1\",\n description=\"Set arithmetic evaluator\",\n author=\"Paul Scott\",\n author_email=\"paul@duedil.com\",\n url=\"https://github.com/icio/setquery\",\n download_url=\"https://github.com/icio/setquery/tarball/0.1\",\n setup_requires=[\"nose\", \"rednose\"],\n py_modules=[\"setquery\", \"test_setquery\"],\n license=\"MIT\",\n keywords=['set', 'expression', 'eval', 'evaluate'],\n classifiers=[],\n)\n"},"subject":{"kind":"string","value":"Include test_setquery module in distribution"},"message":{"kind":"string","value":"Include test_setquery module in distribution\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"icio/evil"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom setuptools import setup\n\nsetup(\n name=\"setquery\",\n version=\"0.1\",\n description=\"Set arithmetic evaluator\",\n author=\"Paul Scott\",\n author_email=\"paul@duedil.com\",\n url=\"https://github.com/icio/setquery\",\n download_url=\"https://github.com/icio/setquery/tarball/0.1\",\n setup_requires=[\"nose\", \"rednose\"],\n 
py_modules=[\"setquery\"],\n license=\"MIT\",\n keywords=['set', 'expression', 'eval', 'evaluate'],\n classifiers=[],\n)\n\n## Instruction:\nInclude test_setquery module in distribution\n\n## Code After:\nfrom setuptools import setup\n\nsetup(\n name=\"setquery\",\n version=\"0.1\",\n description=\"Set arithmetic evaluator\",\n author=\"Paul Scott\",\n author_email=\"paul@duedil.com\",\n url=\"https://github.com/icio/setquery\",\n download_url=\"https://github.com/icio/setquery/tarball/0.1\",\n setup_requires=[\"nose\", \"rednose\"],\n py_modules=[\"setquery\", \"test_setquery\"],\n license=\"MIT\",\n keywords=['set', 'expression', 'eval', 'evaluate'],\n classifiers=[],\n)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n url=\"https://github.com/icio/setquery\",\n download_url=\"https://github.com/icio/setquery/tarball/0.1\",\n setup_requires=[\"nose\", \"rednose\"],\n py_modules=[\"setquery\", \"test_setquery\"],\n license=\"MIT\",\n keywords=['set', 'expression', 'eval', 'evaluate'],\n classifiers=[],\n\n\n ... "}}},{"rowIdx":2166,"cells":{"commit":{"kind":"string","value":"7350422a1364f996b7ac362e8457e2a5e04afc7c"},"old_file":{"kind":"string","value":"sympy/interactive/tests/test_ipythonprinting.py"},"new_file":{"kind":"string","value":"sympy/interactive/tests/test_ipythonprinting.py"},"old_contents":{"kind":"string","value":"\"\"\"Tests that the IPython printing module is properly loaded. 
\"\"\"\n\nfrom sympy.interactive.session import init_ipython_session\nfrom sympy.external import import_module\n\nipython = import_module(\"IPython\", min_module_version=\"0.11\")\n\n# disable tests if ipython is not present\nif not ipython:\n disabled = True\n\ndef test_ipythonprinting():\n # Initialize and setup IPython session\n app = init_ipython_session()\n app.run_cell(\"from IPython.core.interactiveshell import InteractiveShell\")\n app.run_cell(\"inst = InteractiveShell.instance()\")\n app.run_cell(\"format = inst.display_formatter.format\")\n app.run_cell(\"from sympy import Symbol\")\n\n # Printing without printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == \"pi\"\n\n # Load printing extension\n app.run_cell(\"%load_ext sympy.interactive.ipythonprinting\")\n # Printing with printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == u'\\u03c0'\n"},"new_contents":{"kind":"string","value":"\"\"\"Tests that the IPython printing module is properly loaded. 
\"\"\"\n\nfrom sympy.interactive.session import init_ipython_session\nfrom sympy.external import import_module\n\nipython = import_module(\"IPython\", min_module_version=\"0.11\")\n\n# disable tests if ipython is not present\nif not ipython:\n disabled = True\n\ndef test_ipythonprinting():\n # Initialize and setup IPython session\n app = init_ipython_session()\n app.run_cell(\"ip = get_ipython()\")\n app.run_cell(\"inst = ip.instance()\")\n app.run_cell(\"format = inst.display_formatter.format\")\n app.run_cell(\"from sympy import Symbol\")\n\n # Printing without printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == \"pi\"\n\n # Load printing extension\n app.run_cell(\"%load_ext sympy.interactive.ipythonprinting\")\n # Printing with printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == u'\\u03c0'\n"},"subject":{"kind":"string","value":"Make ipythonprinting test more robust"},"message":{"kind":"string","value":"Make ipythonprinting test more 
robust\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"Vishluck/sympy,Mitchkoens/sympy,Davidjohnwilson/sympy,pandeyadarsh/sympy,hrashk/sympy,Davidjohnwilson/sympy,sahmed95/sympy,yukoba/sympy,Sumith1896/sympy,jamesblunt/sympy,moble/sympy,chaffra/sympy,Mitchkoens/sympy,Shaswat27/sympy,saurabhjn76/sympy,abhiii5459/sympy,jerli/sympy,jaimahajan1997/sympy,ahhda/sympy,sunny94/temp,wanglongqi/sympy,meghana1995/sympy,jaimahajan1997/sympy,lindsayad/sympy,Sumith1896/sympy,atsao72/sympy,sampadsaha5/sympy,atreyv/sympy,kaichogami/sympy,Curious72/sympy,kumarkrishna/sympy,Designist/sympy,abhiii5459/sympy,mafiya69/sympy,lidavidm/sympy,meghana1995/sympy,maniteja123/sympy,amitjamadagni/sympy,Titan-C/sympy,yashsharan/sympy,debugger22/sympy,skidzo/sympy,ga7g08/sympy,ga7g08/sympy,liangjiaxing/sympy,skidzo/sympy,cswiercz/sympy,beni55/sympy,liangjiaxing/sympy,Curious72/sympy,MechCoder/sympy,cccfran/sympy,Designist/sympy,cswiercz/sympy,skidzo/sympy,kumarkrishna/sympy,Gadal/sympy,cswiercz/sympy,emon10005/sympy,ahhda/sympy,AkademieOlympia/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,drufat/sympy,rahuldan/sympy,toolforger/sympy,Davidjohnwilson/sympy,rahuldan/sympy,wanglongqi/sympy,souravsingh/sympy,AkademieOlympia/sympy,MechCoder/sympy,kaushik94/sympy,kevalds51/sympy,AunShiLord/sympy,sampadsaha5/sympy,wyom/sympy,asm666/sympy,jbbskinny/sympy,kumarkrishna/sympy,shikil/sympy,sunny94/temp,dqnykamp/sympy,lidavidm/sympy,madan96/sympy,ahhda/sympy,AunShiLord/sympy,bukzor/sympy,saurabhjn76/sympy,MridulS/sympy,vipulroxx/sympy,shipci/sympy,Vishluck/sympy,Vishluck/sympy,ChristinaZografou/sympy,jamesblunt/sympy,hrashk/sympy,jerli/sympy,abloomston/sympy,oliverlee/sympy,Sumith1896/sympy,emon10005/sympy,farhaanbukhsh/sympy,sunny94/temp,abloomston/sympy,moble/sympy,madan96/sympy,grevutiu-gabriel/sympy,AunShiLord/sympy,cccfran/sympy,oliverlee/sympy,kevalds51/sympy,iamutkarshtiwari/sympy,shipci/sympy,souravsingh/sympy,farhaanbuk
hsh/sympy,vipulroxx/sympy,Gadal/sympy,hrashk/sympy,maniteja123/sympy,sahmed95/sympy,kmacinnis/sympy,jbbskinny/sympy,Titan-C/sympy,cccfran/sympy,VaibhavAgarwalVA/sympy,asm666/sympy,rahuldan/sympy,postvakje/sympy,emon10005/sympy,diofant/diofant,yukoba/sympy,aktech/sympy,sahilshekhawat/sympy,drufat/sympy,pandeyadarsh/sympy,MechCoder/sympy,Designist/sympy,MridulS/sympy,aktech/sympy,grevutiu-gabriel/sympy,garvitr/sympy,meghana1995/sympy,Gadal/sympy,moble/sympy,Curious72/sympy,jamesblunt/sympy,kmacinnis/sympy,lindsayad/sympy,postvakje/sympy,atsao72/sympy,iamutkarshtiwari/sympy,debugger22/sympy,toolforger/sympy,hargup/sympy,hargup/sympy,amitjamadagni/sympy,ga7g08/sympy,atreyv/sympy,Shaswat27/sympy,hargup/sympy,Arafatk/sympy,mcdaniel67/sympy,skirpichev/omg,pbrady/sympy,jerli/sympy,Mitchkoens/sympy,liangjiaxing/sympy,dqnykamp/sympy,sahilshekhawat/sympy,beni55/sympy,mafiya69/sympy,MridulS/sympy,Shaswat27/sympy,Arafatk/sympy,souravsingh/sympy,dqnykamp/sympy,jaimahajan1997/sympy,oliverlee/sympy,shipci/sympy,lindsayad/sympy,kaushik94/sympy,abloomston/sympy,atsao72/sympy,wanglongqi/sympy,kaushik94/sympy,toolforger/sympy,shikil/sympy,postvakje/sympy,Arafatk/sympy,VaibhavAgarwalVA/sympy,yukoba/sympy,maniteja123/sympy,beni55/sympy,yashsharan/sympy,mcdaniel67/sympy,pbrady/sympy,yashsharan/sympy,VaibhavAgarwalVA/sympy,farhaanbukhsh/sympy,wyom/sympy,jbbskinny/sympy,vipulroxx/sympy,lidavidm/sympy,iamutkarshtiwari/sympy,atreyv/sympy,garvitr/sympy,bukzor/sympy,kaichogami/sympy,AkademieOlympia/sympy,bukzor/sympy,chaffra/sympy,asm666/sympy,garvitr/sympy,grevutiu-gabriel/sympy,Titan-C/sympy,kmacinnis/sympy,aktech/sympy,madan96/sympy,abhiii5459/sympy,chaffra/sympy,drufat/sympy,saurabhjn76/sympy,pbrady/sympy,sampadsaha5/sympy,wyom/sympy,shikil/sympy,ChristinaZografou/sympy,pandeyadarsh/sympy,debugger22/sympy,kaichogami/sympy,sahmed95/sympy,kevalds51/sympy,sahilshekhawat/sympy,mafiya69/sympy"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code 
Before:\n\"\"\"Tests that the IPython printing module is properly loaded. \"\"\"\n\nfrom sympy.interactive.session import init_ipython_session\nfrom sympy.external import import_module\n\nipython = import_module(\"IPython\", min_module_version=\"0.11\")\n\n# disable tests if ipython is not present\nif not ipython:\n disabled = True\n\ndef test_ipythonprinting():\n # Initialize and setup IPython session\n app = init_ipython_session()\n app.run_cell(\"from IPython.core.interactiveshell import InteractiveShell\")\n app.run_cell(\"inst = InteractiveShell.instance()\")\n app.run_cell(\"format = inst.display_formatter.format\")\n app.run_cell(\"from sympy import Symbol\")\n\n # Printing without printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == \"pi\"\n\n # Load printing extension\n app.run_cell(\"%load_ext sympy.interactive.ipythonprinting\")\n # Printing with printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == u'\\u03c0'\n\n## Instruction:\nMake ipythonprinting test more robust\n\n## Code After:\n\"\"\"Tests that the IPython printing module is properly loaded. 
\"\"\"\n\nfrom sympy.interactive.session import init_ipython_session\nfrom sympy.external import import_module\n\nipython = import_module(\"IPython\", min_module_version=\"0.11\")\n\n# disable tests if ipython is not present\nif not ipython:\n disabled = True\n\ndef test_ipythonprinting():\n # Initialize and setup IPython session\n app = init_ipython_session()\n app.run_cell(\"ip = get_ipython()\")\n app.run_cell(\"inst = ip.instance()\")\n app.run_cell(\"format = inst.display_formatter.format\")\n app.run_cell(\"from sympy import Symbol\")\n\n # Printing without printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == \"pi\"\n\n # Load printing extension\n app.run_cell(\"%load_ext sympy.interactive.ipythonprinting\")\n # Printing with printing extension\n app.run_cell(\"a = format(Symbol('pi'))\")\n assert app.user_ns['a']['text/plain'] == u'\\u03c0'\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\ndef test_ipythonprinting():\n # Initialize and setup IPython session\n app = init_ipython_session()\n app.run_cell(\"ip = get_ipython()\")\n app.run_cell(\"inst = ip.instance()\")\n app.run_cell(\"format = inst.display_formatter.format\")\n app.run_cell(\"from sympy import Symbol\")\n\n\n\n ... 
"}}},{"rowIdx":2167,"cells":{"commit":{"kind":"string","value":"1e66aba5a2c82b09a6485842948aad49c654efb4"},"old_file":{"kind":"string","value":"scripts/load_topics_to_mongodb.py"},"new_file":{"kind":"string","value":"scripts/load_topics_to_mongodb.py"},"old_contents":{"kind":"string","value":"import os\nimport csv\nfrom pymongo import MongoClient\n\nprint('Parsing topics')\ntopics = {}\nwith open('topics.csv', 'rb') as csvfile:\n reader = csv.reader(csvfile)\n for line in reader:\n if line[0] == 1:\n continue\n topics[line[0]] = line[1:]\n\nprint('Connecting to MongoDB')\nmongodb_client = MongoClient(os.environ['MONGODB_URL'])\ndb = mongodb_client.tvrain\narticles = db.articles\n\nfor article in topics:\n articles.update({'_id': article}, {'$set': {\n 'topics': topics[article]\n }})\n"},"new_contents":{"kind":"string","value":"import os\nimport sys\nimport csv\nfrom pymongo import MongoClient\n\nprint('Parsing topics')\ntopics = {}\nwith open(sys.argv[1], 'r') as csvfile:\n reader = csv.reader(csvfile)\n for line in reader:\n if line[0] == 1:\n continue\n topics[line[0]] = line[1:]\n\nprint('Connecting to MongoDB')\nmongodb_client = MongoClient(os.environ['MONGODB_URL'])\ndb = mongodb_client.tvrain\narticles = db.articles\n\nfor article in topics:\n articles.update({'_id': article}, {'$set': {\n 'topics': topics[article]\n }})\n"},"subject":{"kind":"string","value":"Fix script for loading topics into mongodb"},"message":{"kind":"string","value":"Fix script for loading topics into mongodb"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"xenx/recommendation_system,xenx/recommendation_system"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport csv\nfrom pymongo import MongoClient\n\nprint('Parsing topics')\ntopics = {}\nwith open('topics.csv', 'rb') as csvfile:\n reader = csv.reader(csvfile)\n for line in reader:\n if line[0] == 1:\n 
continue\n topics[line[0]] = line[1:]\n\nprint('Connecting to MongoDB')\nmongodb_client = MongoClient(os.environ['MONGODB_URL'])\ndb = mongodb_client.tvrain\narticles = db.articles\n\nfor article in topics:\n articles.update({'_id': article}, {'$set': {\n 'topics': topics[article]\n }})\n\n## Instruction:\nFix script for loading topics into mongodb\n## Code After:\nimport os\nimport sys\nimport csv\nfrom pymongo import MongoClient\n\nprint('Parsing topics')\ntopics = {}\nwith open(sys.argv[1], 'r') as csvfile:\n reader = csv.reader(csvfile)\n for line in reader:\n if line[0] == 1:\n continue\n topics[line[0]] = line[1:]\n\nprint('Connecting to MongoDB')\nmongodb_client = MongoClient(os.environ['MONGODB_URL'])\ndb = mongodb_client.tvrain\narticles = db.articles\n\nfor article in topics:\n articles.update({'_id': article}, {'$set': {\n 'topics': topics[article]\n }})\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport os\nimport sys\nimport csv\nfrom pymongo import MongoClient\n\nprint('Parsing topics')\ntopics = {}\nwith open(sys.argv[1], 'r') as csvfile:\n reader = csv.reader(csvfile)\n for line in reader:\n if line[0] == 1:\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2168,"cells":{"commit":{"kind":"string","value":"9e1eae8b5d63a046b0dbfdb738419abd2a8edd69"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"from os import path\n\nfrom setuptools import setup, find_packages\n\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\n\ninstall_requires = [\n 'tornado',\n 'pyserial'\n]\n\nsetup_requires = [\n 'pytest-runner'\n]\n\ntests_require = [\n 'pytest',\n 'coverage',\n 'pytest-cov'\n]\n\nextras_require = {\n 'tests': tests_require,\n 'all': install_requires + tests_require\n}\n\nsetup(\n name=\"dusty-acorn\",\n version=\"2.0\",\n description=\"Air Quality monitoring web application\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/niwa/dusty-acorn\",\n packages=find_packages(),\n python_requires='>=3.7',\n install_requires=install_requires,\n setup_requires=setup_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n entry_points={\n 'console_scripts': [\n 'dusty-acorn=dusty_acorn:main'\n ]\n }\n)\n"},"new_contents":{"kind":"string","value":"from os import path\n\nfrom setuptools import setup, find_packages\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\ninstall_requires = [\n 'tornado',\n 'pyserial'\n]\n\nsetup_requires = [\n 'pytest-runner'\n]\n\ntests_require = [\n 'pytest',\n 'coverage',\n 'pytest-cov'\n]\n\nextras_require = {\n 'tests': tests_require,\n 'all': install_requires + tests_require\n}\n\nsetup(\n name=\"dusty-acorn\",\n version=\"2.0\",\n description=\"Air Quality monitoring web application\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n 
url=\"https://github.com/niwa/dusty-acorn\",\n packages=find_packages(),\n # find . -name \"*.*\" -exec sh -c 'echo \"${0##*.}\"' {} \\; | sort | uniq\n package_data={\n '': [\n '*.css',\n '*.eot',\n '*.html',\n '*.jpg',\n '*.js',\n '*.json',\n '*.mp3',\n '*.mp4',\n '*.ods',\n '*.otf',\n '*.png',\n '*.svg',\n '*.ttf',\n '*.woff',\n '*.woff2'\n ],\n },\n python_requires='>=3.7',\n install_requires=install_requires,\n setup_requires=setup_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n entry_points={\n 'console_scripts': [\n 'dusty-acorn=dusty_acorn:main'\n ]\n }\n)\n"},"subject":{"kind":"string","value":"Include non-py files in the final package too"},"message":{"kind":"string","value":"Include non-py files in the final package too\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"guolivar/dusty-acorn,guolivar/dusty-acorn,guolivar/dusty-acorn,guolivar/dusty-acorn"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom os import path\n\nfrom setuptools import setup, find_packages\n\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\n\ninstall_requires = [\n 'tornado',\n 'pyserial'\n]\n\nsetup_requires = [\n 'pytest-runner'\n]\n\ntests_require = [\n 'pytest',\n 'coverage',\n 'pytest-cov'\n]\n\nextras_require = {\n 'tests': tests_require,\n 'all': install_requires + tests_require\n}\n\nsetup(\n name=\"dusty-acorn\",\n version=\"2.0\",\n description=\"Air Quality monitoring web application\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/niwa/dusty-acorn\",\n packages=find_packages(),\n python_requires='>=3.7',\n install_requires=install_requires,\n setup_requires=setup_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n entry_points={\n 
'console_scripts': [\n 'dusty-acorn=dusty_acorn:main'\n ]\n }\n)\n\n## Instruction:\nInclude non-py files in the final package too\n\n## Code After:\nfrom os import path\n\nfrom setuptools import setup, find_packages\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\ninstall_requires = [\n 'tornado',\n 'pyserial'\n]\n\nsetup_requires = [\n 'pytest-runner'\n]\n\ntests_require = [\n 'pytest',\n 'coverage',\n 'pytest-cov'\n]\n\nextras_require = {\n 'tests': tests_require,\n 'all': install_requires + tests_require\n}\n\nsetup(\n name=\"dusty-acorn\",\n version=\"2.0\",\n description=\"Air Quality monitoring web application\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/niwa/dusty-acorn\",\n packages=find_packages(),\n # find . -name \"*.*\" -exec sh -c 'echo \"${0##*.}\"' {} \\; | sort | uniq\n package_data={\n '': [\n '*.css',\n '*.eot',\n '*.html',\n '*.jpg',\n '*.js',\n '*.json',\n '*.mp3',\n '*.mp4',\n '*.ods',\n '*.otf',\n '*.png',\n '*.svg',\n '*.ttf',\n '*.woff',\n '*.woff2'\n ],\n },\n python_requires='>=3.7',\n install_requires=install_requires,\n setup_requires=setup_requires,\n tests_require=tests_require,\n extras_require=extras_require,\n entry_points={\n 'console_scripts': [\n 'dusty-acorn=dusty_acorn:main'\n ]\n }\n)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nfrom os import path\n\nfrom setuptools import setup, find_packages\n\nhere = path.abspath(path.dirname(__file__))\n\nwith open(path.join(here, 'README.md'), encoding='utf-8') as f:\n long_description = f.read()\n\ninstall_requires = [\n 'tornado',\n\n\n// ... modified code ... \n\n\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/niwa/dusty-acorn\",\n packages=find_packages(),\n # find . 
-name \"*.*\" -exec sh -c 'echo \"${0##*.}\"' {} \\; | sort | uniq\n package_data={\n '': [\n '*.css',\n '*.eot',\n '*.html',\n '*.jpg',\n '*.js',\n '*.json',\n '*.mp3',\n '*.mp4',\n '*.ods',\n '*.otf',\n '*.png',\n '*.svg',\n '*.ttf',\n '*.woff',\n '*.woff2'\n ],\n },\n python_requires='>=3.7',\n install_requires=install_requires,\n setup_requires=setup_requires,\n\n\n// ... rest of the code ..."}}},{"rowIdx":2169,"cells":{"commit":{"kind":"string","value":"c76734ea034f2a48de0eab995c5db5667086e0c8"},"old_file":{"kind":"string","value":"common/util/log.py"},"new_file":{"kind":"string","value":"common/util/log.py"},"old_contents":{"kind":"string","value":"import sublime\n\n\ndef universal_newlines(string):\n return string.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n\n\ndef panel(message, run_async=True):\n message = universal_newlines(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_display_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_display_panel\", {\"msg\": message})\n\n\ndef panel_append(message, run_async=True):\n message = universal_newlines(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_append_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_append_panel\", {\"msg\": message})\n"},"new_contents":{"kind":"string","value":"import re\nimport sublime\n\n\nANSI_ESCAPE_RE = re.compile(r'\\x1B\\[[0-?]*[ -/]*[@-~]')\n\n\ndef normalize(string):\n return ANSI_ESCAPE_RE.sub('', string.replace('\\r\\n', '\\n').replace('\\r', '\\n'))\n\n\ndef panel(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_display_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_display_panel\", {\"msg\": message})\n\n\ndef 
panel_append(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_append_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_append_panel\", {\"msg\": message})\n"},"subject":{"kind":"string","value":"Remove ANSI escape sequences from panel output"},"message":{"kind":"string","value":"Remove ANSI escape sequences from panel output\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sublime\n\n\ndef universal_newlines(string):\n return string.replace('\\r\\n', '\\n').replace('\\r', '\\n')\n\n\ndef panel(message, run_async=True):\n message = universal_newlines(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_display_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_display_panel\", {\"msg\": message})\n\n\ndef panel_append(message, run_async=True):\n message = universal_newlines(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_append_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_append_panel\", {\"msg\": message})\n\n## Instruction:\nRemove ANSI escape sequences from panel output\n\n## Code After:\nimport re\nimport sublime\n\n\nANSI_ESCAPE_RE = re.compile(r'\\x1B\\[[0-?]*[ -/]*[@-~]')\n\n\ndef normalize(string):\n return ANSI_ESCAPE_RE.sub('', string.replace('\\r\\n', '\\n').replace('\\r', '\\n'))\n\n\ndef panel(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: 
view.run_command(\"gs_display_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_display_panel\", {\"msg\": message})\n\n\ndef panel_append(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n lambda: view.run_command(\"gs_append_panel\", {\"msg\": message})\n )\n else:\n view.run_command(\"gs_append_panel\", {\"msg\": message})\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport re\nimport sublime\n\n\nANSI_ESCAPE_RE = re.compile(r'\\x1B\\[[0-?]*[ -/]*[@-~]')\n\n\ndef normalize(string):\n return ANSI_ESCAPE_RE.sub('', string.replace('\\r\\n', '\\n').replace('\\r', '\\n'))\n\n\ndef panel(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n\n\n// ... modified code ... \n\n\n\n\ndef panel_append(message, run_async=True):\n message = normalize(str(message))\n view = sublime.active_window().active_view()\n if run_async:\n sublime.set_timeout_async(\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2170,"cells":{"commit":{"kind":"string","value":"266027514c740c30c0efae5fcd1e2932f1be9933"},"old_file":{"kind":"string","value":"perfrunner/tests/ycsb2.py"},"new_file":{"kind":"string","value":"perfrunner/tests/ycsb2.py"},"old_contents":{"kind":"string","value":"from perfrunner.helpers.cbmonitor import with_stats\nfrom perfrunner.helpers.local import clone_ycsb\nfrom perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task\nfrom perfrunner.tests import PerfTest\nfrom perfrunner.tests.n1ql import N1QLTest\n\n\nclass YCSBTest(PerfTest):\n\n def download_ycsb(self):\n clone_ycsb(repo=self.test_config.ycsb_settings.repo,\n branch=self.test_config.ycsb_settings.branch)\n\n def load(self, *args, **kwargs):\n PerfTest.load(self, task=ycsb_data_load_task)\n self.check_num_items()\n\n @with_stats\n def access(self, *args, **kwargs):\n PerfTest.access(self, task=ycsb_task)\n\n def _report_kpi(self):\n self.reporter.post_to_sf(\n self.metric_helper.parse_ycsb_throughput()\n )\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n\n self.access()\n\n self.report_kpi()\n\n\nclass YCSBN1QLTest(YCSBTest, N1QLTest):\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n\n self.build_index()\n\n self.access()\n\n self.report_kpi()\n"},"new_contents":{"kind":"string","value":"from perfrunner.helpers.cbmonitor import with_stats\nfrom perfrunner.helpers.local import clone_ycsb\nfrom perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task\nfrom perfrunner.tests import PerfTest\nfrom perfrunner.tests.n1ql import N1QLTest\n\n\nclass YCSBTest(PerfTest):\n\n def download_ycsb(self):\n clone_ycsb(repo=self.test_config.ycsb_settings.repo,\n branch=self.test_config.ycsb_settings.branch)\n\n def load(self, *args, **kwargs):\n PerfTest.load(self, task=ycsb_data_load_task)\n\n @with_stats\n def access(self, *args, **kwargs):\n PerfTest.access(self, task=ycsb_task)\n\n def 
_report_kpi(self):\n self.reporter.post_to_sf(\n self.metric_helper.parse_ycsb_throughput()\n )\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.access()\n\n self.report_kpi()\n\n\nclass YCSBN1QLTest(YCSBTest, N1QLTest):\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.build_index()\n\n self.access()\n\n self.report_kpi()\n"},"subject":{"kind":"string","value":"Check the number of items a little bit later"},"message":{"kind":"string","value":"Check the number of items a little bit later\n\nDue to MB-22749\n\nChange-Id: Icffe46201223efa5645644ca40b99dffe4f0fb31\nReviewed-on: http://review.couchbase.org/76413\nTested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>\nReviewed-by: Pavel Paulau \n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom perfrunner.helpers.cbmonitor import with_stats\nfrom perfrunner.helpers.local import clone_ycsb\nfrom perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task\nfrom perfrunner.tests import PerfTest\nfrom perfrunner.tests.n1ql import N1QLTest\n\n\nclass YCSBTest(PerfTest):\n\n def download_ycsb(self):\n clone_ycsb(repo=self.test_config.ycsb_settings.repo,\n branch=self.test_config.ycsb_settings.branch)\n\n def load(self, *args, **kwargs):\n PerfTest.load(self, task=ycsb_data_load_task)\n self.check_num_items()\n\n @with_stats\n def access(self, *args, **kwargs):\n PerfTest.access(self, task=ycsb_task)\n\n def _report_kpi(self):\n 
self.reporter.post_to_sf(\n self.metric_helper.parse_ycsb_throughput()\n )\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n\n self.access()\n\n self.report_kpi()\n\n\nclass YCSBN1QLTest(YCSBTest, N1QLTest):\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n\n self.build_index()\n\n self.access()\n\n self.report_kpi()\n\n## Instruction:\nCheck the number of items a little bit later\n\nDue to MB-22749\n\nChange-Id: Icffe46201223efa5645644ca40b99dffe4f0fb31\nReviewed-on: http://review.couchbase.org/76413\nTested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>\nReviewed-by: Pavel Paulau \n\n## Code After:\nfrom perfrunner.helpers.cbmonitor import with_stats\nfrom perfrunner.helpers.local import clone_ycsb\nfrom perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task\nfrom perfrunner.tests import PerfTest\nfrom perfrunner.tests.n1ql import N1QLTest\n\n\nclass YCSBTest(PerfTest):\n\n def download_ycsb(self):\n clone_ycsb(repo=self.test_config.ycsb_settings.repo,\n branch=self.test_config.ycsb_settings.branch)\n\n def load(self, *args, **kwargs):\n PerfTest.load(self, task=ycsb_data_load_task)\n\n @with_stats\n def access(self, *args, **kwargs):\n PerfTest.access(self, task=ycsb_task)\n\n def _report_kpi(self):\n self.reporter.post_to_sf(\n self.metric_helper.parse_ycsb_throughput()\n )\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.access()\n\n self.report_kpi()\n\n\nclass YCSBN1QLTest(YCSBTest, N1QLTest):\n\n def run(self):\n self.download_ycsb()\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.build_index()\n\n self.access()\n\n self.report_kpi()\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n\n def load(self, *args, **kwargs):\n PerfTest.load(self, task=ycsb_data_load_task)\n\n @with_stats\n def access(self, *args, **kwargs):\n\n\n# ... modified code ... \n\n\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.access()\n\n\n\n ... \n\n\n\n self.load()\n self.wait_for_persistence()\n self.check_num_items()\n\n self.build_index()\n\n\n\n# ... rest of the code ..."}}},{"rowIdx":2171,"cells":{"commit":{"kind":"string","value":"5db82bfd52cdc4778384b1291b35d1d8791b3046"},"old_file":{"kind":"string","value":"app/src/main/java/com/pr0gramm/app/util/Affiliate.kt"},"new_file":{"kind":"string","value":"app/src/main/java/com/pr0gramm/app/util/Affiliate.kt"},"old_contents":{"kind":"string","value":"package com.pr0gramm.app.util\n\nimport android.net.Uri\n\nobject Affiliate {\n private val paramsHubTraffic = mapOf(\n \"utm_source\" to \"paid\",\n \"utm_medium\" to \"hubtraffic\",\n \"utm_campaign\" to \"hubtraffic_pr0grammapp\")\n\n private val reHubTraffic = \"(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\\\.com\".toRegex()\n\n private fun hubTraffic(url: String): String {\n val uri = Uri.parse(url)\n\n val updated = uri.buildUpon().clearQuery()\n\n for (name in uri.queryParameterNames - paramsHubTraffic.keys) {\n val value = uri.getQueryParameter(name) ?: continue\n updated.appendQueryParameter(name, value)\n }\n\n for ((name, value) in paramsHubTraffic) {\n updated.appendQueryParameter(name, value)\n }\n\n return updated.build().toString()\n }\n\n fun get(url: String): String? 
{\n return when {\n reHubTraffic.containsMatchIn(url) -> hubTraffic(url)\n\n // no affiliate url\n else -> null\n }\n }\n}"},"new_contents":{"kind":"string","value":"package com.pr0gramm.app.util\n\nimport android.util.Base64\n\nobject Affiliate {\n private val paramsHubTraffic = mapOf(\n \"utm_source\" to \"paid\",\n \"utm_medium\" to \"hubtraffic\",\n \"utm_campaign\" to \"hubtraffic_pr0grammapp\")\n\n private val reHubTraffic = \"(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\\\.com\".toRegex()\n\n private fun hubTraffic(url: String): String {\n val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE)\n return \"https://app.pr0gramm.com/redirect.html?u=$encoded\"\n }\n\n fun get(url: String): String? {\n return when {\n reHubTraffic.containsMatchIn(url) -> hubTraffic(url)\n\n // no affiliate url\n else -> null\n }\n }\n}"},"subject":{"kind":"string","value":"Fix hubtraffic by redirecting over a app.pr0gramm.com domain."},"message":{"kind":"string","value":"Fix hubtraffic by redirecting over a app.pr0gramm.com domain.\n"},"lang":{"kind":"string","value":"Kotlin"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"mopsalarm/Pr0,mopsalarm/Pr0,mopsalarm/Pr0"},"config":{"kind":"string","value":"kotlin"},"content":{"kind":"string","value":"## Code Before:\npackage com.pr0gramm.app.util\n\nimport android.net.Uri\n\nobject Affiliate {\n private val paramsHubTraffic = mapOf(\n \"utm_source\" to \"paid\",\n \"utm_medium\" to \"hubtraffic\",\n \"utm_campaign\" to \"hubtraffic_pr0grammapp\")\n\n private val reHubTraffic = \"(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\\\.com\".toRegex()\n\n private fun hubTraffic(url: String): String {\n val uri = Uri.parse(url)\n\n val updated = uri.buildUpon().clearQuery()\n\n for (name in uri.queryParameterNames - paramsHubTraffic.keys) {\n val value = uri.getQueryParameter(name) ?: continue\n updated.appendQueryParameter(name, value)\n }\n\n 
for ((name, value) in paramsHubTraffic) {\n updated.appendQueryParameter(name, value)\n }\n\n return updated.build().toString()\n }\n\n fun get(url: String): String? {\n return when {\n reHubTraffic.containsMatchIn(url) -> hubTraffic(url)\n\n // no affiliate url\n else -> null\n }\n }\n}\n## Instruction:\nFix hubtraffic by redirecting over a app.pr0gramm.com domain.\n\n## Code After:\npackage com.pr0gramm.app.util\n\nimport android.util.Base64\n\nobject Affiliate {\n private val paramsHubTraffic = mapOf(\n \"utm_source\" to \"paid\",\n \"utm_medium\" to \"hubtraffic\",\n \"utm_campaign\" to \"hubtraffic_pr0grammapp\")\n\n private val reHubTraffic = \"(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\\\.com\".toRegex()\n\n private fun hubTraffic(url: String): String {\n val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE)\n return \"https://app.pr0gramm.com/redirect.html?u=$encoded\"\n }\n\n fun get(url: String): String? {\n return when {\n reHubTraffic.containsMatchIn(url) -> hubTraffic(url)\n\n // no affiliate url\n else -> null\n }\n }\n}"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\npackage com.pr0gramm.app.util\n\nimport android.util.Base64\n\nobject Affiliate {\n private val paramsHubTraffic = mapOf(\n\n\n// ... modified code ... \n\n\n private val reHubTraffic = \"(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\\\.com\".toRegex()\n\n private fun hubTraffic(url: String): String {\n val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE)\n return \"https://app.pr0gramm.com/redirect.html?u=$encoded\"\n }\n\n fun get(url: String): String? {\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2172,"cells":{"commit":{"kind":"string","value":"16b07dd961cbe55ee452ed6057048ec452ffbd72"},"old_file":{"kind":"string","value":"custom/icds/management/commands/copy_icds_app.py"},"new_file":{"kind":"string","value":"custom/icds/management/commands/copy_icds_app.py"},"old_contents":{"kind":"string","value":"from __future__ import absolute_import, print_function, unicode_literals\n\nfrom django.core.management import BaseCommand\n\nfrom corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app\nfrom corehq.apps.app_manager.models import import_app\n\n\nclass Command(BaseCommand):\n help = \"Make a copy of a specific version of an application on the same domain\"\n\n def add_arguments(self, parser):\n parser.add_argument('domain')\n parser.add_argument('app_id')\n parser.add_argument('version')\n parser.add_argument('new_name')\n\n def handle(self, domain, app_id, version, new_name, **options):\n old_app = get_build_doc_by_version(domain, app_id, version)\n if not old_app:\n raise Exception(\"No app found with id '{}' and version '{}', on '{}'\"\n .format(app_id, version, domain))\n old_app = wrap_app(old_app)\n old_app.convert_build_to_app()\n new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})\n"},"new_contents":{"kind":"string","value":"from __future__ import absolute_import, print_function, unicode_literals\n\nfrom django.core.management import BaseCommand\n\nfrom corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app\nfrom corehq.apps.app_manager.models import import_app\n\n\nclass Command(BaseCommand):\n help = \"Make a copy of a specific version of an application on the same domain\"\n\n def add_arguments(self, parser):\n parser.add_argument('domain')\n parser.add_argument('app_id')\n parser.add_argument('version')\n parser.add_argument('new_name')\n\n def handle(self, domain, app_id, version, new_name, **options):\n old_app = 
get_build_doc_by_version(domain, app_id, version)\n if not old_app:\n raise Exception(\"No app found with id '{}' and version '{}', on '{}'\"\n .format(app_id, version, domain))\n old_app = wrap_app(old_app)\n old_app.convert_build_to_app()\n new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})\n\n old_to_new = get_old_to_new_config_ids(old_app, new_app)\n for form in new_app.get_forms():\n for old_id, new_id in old_to_new:\n form.source = form.source.replace(old_id, new_id)\n\n new_app.save()\n\n\ndef get_old_to_new_config_ids(old_app, new_app):\n return [\n (old_config.uuid, new_config.uuid)\n for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules())\n for old_config, new_config in zip(old_module.report_configs, new_module.report_configs)\n ]\n"},"subject":{"kind":"string","value":"Replace old config IDs with the new ones"},"message":{"kind":"string","value":"Replace old config IDs with the new ones\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom __future__ import absolute_import, print_function, unicode_literals\n\nfrom django.core.management import BaseCommand\n\nfrom corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app\nfrom corehq.apps.app_manager.models import import_app\n\n\nclass Command(BaseCommand):\n help = \"Make a copy of a specific version of an application on the same domain\"\n\n def add_arguments(self, parser):\n parser.add_argument('domain')\n parser.add_argument('app_id')\n parser.add_argument('version')\n parser.add_argument('new_name')\n\n def handle(self, domain, app_id, version, new_name, **options):\n old_app = get_build_doc_by_version(domain, app_id, version)\n if not 
old_app:\n raise Exception(\"No app found with id '{}' and version '{}', on '{}'\"\n .format(app_id, version, domain))\n old_app = wrap_app(old_app)\n old_app.convert_build_to_app()\n new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})\n\n## Instruction:\nReplace old config IDs with the new ones\n\n## Code After:\nfrom __future__ import absolute_import, print_function, unicode_literals\n\nfrom django.core.management import BaseCommand\n\nfrom corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app\nfrom corehq.apps.app_manager.models import import_app\n\n\nclass Command(BaseCommand):\n help = \"Make a copy of a specific version of an application on the same domain\"\n\n def add_arguments(self, parser):\n parser.add_argument('domain')\n parser.add_argument('app_id')\n parser.add_argument('version')\n parser.add_argument('new_name')\n\n def handle(self, domain, app_id, version, new_name, **options):\n old_app = get_build_doc_by_version(domain, app_id, version)\n if not old_app:\n raise Exception(\"No app found with id '{}' and version '{}', on '{}'\"\n .format(app_id, version, domain))\n old_app = wrap_app(old_app)\n old_app.convert_build_to_app()\n new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})\n\n old_to_new = get_old_to_new_config_ids(old_app, new_app)\n for form in new_app.get_forms():\n for old_id, new_id in old_to_new:\n form.source = form.source.replace(old_id, new_id)\n\n new_app.save()\n\n\ndef get_old_to_new_config_ids(old_app, new_app):\n return [\n (old_config.uuid, new_config.uuid)\n for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules())\n for old_config, new_config in zip(old_module.report_configs, new_module.report_configs)\n ]\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n old_app = wrap_app(old_app)\n old_app.convert_build_to_app()\n new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})\n\n old_to_new = get_old_to_new_config_ids(old_app, new_app)\n for form in new_app.get_forms():\n for old_id, new_id in old_to_new:\n form.source = form.source.replace(old_id, new_id)\n\n new_app.save()\n\n\ndef get_old_to_new_config_ids(old_app, new_app):\n return [\n (old_config.uuid, new_config.uuid)\n for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules())\n for old_config, new_config in zip(old_module.report_configs, new_module.report_configs)\n ]\n\n\n# ... rest of the code ..."}}},{"rowIdx":2173,"cells":{"commit":{"kind":"string","value":"9808e97747785c27387ad1ce9ffc3e9a05c80f08"},"old_file":{"kind":"string","value":"enigma.py"},"new_file":{"kind":"string","value":"enigma.py"},"old_contents":{"kind":"string","value":"import string\n\n\nclass Steckerbrett:\n def __init__(self):\n pass\n\n\nclass Walzen:\n def __init__(self):\n pass\n\n\nclass Enigma:\n def __init__(self):\n pass\n\n def cipher(self, message):\n pass"},"new_contents":{"kind":"string","value":"import string\n\n\nclass Steckerbrett:\n def __init__(self):\n pass\n\n\nclass Umkehrwalze:\n def __init__(self, wiring):\n self.wiring = wiring\n\n def encode(self, letter):\n return self.wiring[string.ascii_uppercase.index(letter)]\n\n\nclass Walzen:\n def __init__(self):\n pass\n\n\nclass Enigma:\n def __init__(self):\n pass\n\n def cipher(self, message):\n pass"},"subject":{"kind":"string","value":"Create class for the reflectors"},"message":{"kind":"string","value":"Create class for the reflectors\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"ranisalt/enigma"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport string\n\n\nclass Steckerbrett:\n def __init__(self):\n pass\n\n\nclass 
Walzen:\n def __init__(self):\n pass\n\n\nclass Enigma:\n def __init__(self):\n pass\n\n def cipher(self, message):\n pass\n## Instruction:\nCreate class for the reflectors\n\n## Code After:\nimport string\n\n\nclass Steckerbrett:\n def __init__(self):\n pass\n\n\nclass Umkehrwalze:\n def __init__(self, wiring):\n self.wiring = wiring\n\n def encode(self, letter):\n return self.wiring[string.ascii_uppercase.index(letter)]\n\n\nclass Walzen:\n def __init__(self):\n pass\n\n\nclass Enigma:\n def __init__(self):\n pass\n\n def cipher(self, message):\n pass"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nclass Steckerbrett:\n def __init__(self):\n pass\n\n\nclass Umkehrwalze:\n def __init__(self, wiring):\n self.wiring = wiring\n\n def encode(self, letter):\n return self.wiring[string.ascii_uppercase.index(letter)]\n\n\nclass Walzen:\n\n\n// ... rest of the code ..."}}},{"rowIdx":2174,"cells":{"commit":{"kind":"string","value":"545f688f0dd59df009e2392cbf27ef06865a4b89"},"old_file":{"kind":"string","value":"src/azure/cli/__main__.py"},"new_file":{"kind":"string","value":"src/azure/cli/__main__.py"},"old_contents":{"kind":"string","value":"import sys\n\nimport azure.cli.main\n\nfrom azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush\n\ntry:\n try:\n if user_agrees_to_telemetry():\n init_telemetry()\n except Exception: #pylint: disable=broad-except\n pass\n\n sys.exit(azure.cli.main.main(sys.argv[1:]))\nfinally:\n telemetry_flush()\n"},"new_contents":{"kind":"string","value":"import sys\nimport os\n\nimport azure.cli.main\n\nfrom azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush\n\ntry:\n try:\n if user_agrees_to_telemetry():\n init_telemetry()\n except Exception: #pylint: disable=broad-except\n pass\n\n args = sys.argv[1:]\n \n # Check if we are in argcomplete mode - if so, we\n # need to pick up our args from environment variables\n if os.environ.get('_ARGCOMPLETE'):\n 
comp_line = os.environ.get('COMP_LINE')\n if comp_line:\n args = comp_line.split()[1:]\n \n sys.exit(azure.cli.main.main(args))\nfinally:\n telemetry_flush()\n"},"subject":{"kind":"string","value":"Speed up argument completions by not loading all command packages unless we have to..."},"message":{"kind":"string","value":"Speed up argument completions by not loading all command packages unless we have to...\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"yugangw-msft/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sys\n\nimport azure.cli.main\n\nfrom azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush\n\ntry:\n try:\n if user_agrees_to_telemetry():\n init_telemetry()\n except Exception: #pylint: disable=broad-except\n pass\n\n sys.exit(azure.cli.main.main(sys.argv[1:]))\nfinally:\n telemetry_flush()\n\n## Instruction:\nSpeed up argument completions by not loading all command packages unless we have to...\n\n## Code After:\nimport sys\nimport os\n\nimport azure.cli.main\n\nfrom azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush\n\ntry:\n try:\n if user_agrees_to_telemetry():\n init_telemetry()\n except Exception: #pylint: disable=broad-except\n pass\n\n args = sys.argv[1:]\n \n # Check if we are in argcomplete mode - if so, we\n # need to pick up our args from environment variables\n if os.environ.get('_ARGCOMPLETE'):\n comp_line = os.environ.get('COMP_LINE')\n if comp_line:\n args = comp_line.split()[1:]\n \n 
sys.exit(azure.cli.main.main(args))\nfinally:\n telemetry_flush()\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nimport sys\nimport os\n\nimport azure.cli.main\n\n\n\n# ... modified code ... \n\n\n except Exception: #pylint: disable=broad-except\n pass\n\n args = sys.argv[1:]\n \n # Check if we are in argcomplete mode - if so, we\n # need to pick up our args from environment variables\n if os.environ.get('_ARGCOMPLETE'):\n comp_line = os.environ.get('COMP_LINE')\n if comp_line:\n args = comp_line.split()[1:]\n \n sys.exit(azure.cli.main.main(args))\nfinally:\n telemetry_flush()\n\n\n# ... rest of the code ..."}}},{"rowIdx":2175,"cells":{"commit":{"kind":"string","value":"ae5b93c4e12f732a8c56de80b39f227c90ef4809"},"old_file":{"kind":"string","value":"polls/models.py"},"new_file":{"kind":"string","value":"polls/models.py"},"old_contents":{"kind":"string","value":"from django.db import models\n\n\nclass Question(models.Model):\n question_text = models.CharField(max_length=140)\n published_at = models.DateTimeField(auto_now_add=True)\n\n class Meta:\n get_latest_by = 'published_at'\n\n def __str__(self):\n return self.question_text\n\n\nclass Choice(models.Model):\n question = models.ForeignKey(Question, related_name='choices')\n choice_text = models.CharField(max_length=140)\n\n def __str__(self):\n return self.choice_text\n\n def vote(self):\n \"\"\"\n Create a vote on this choice.\n \"\"\"\n return Vote.objects.create(choice=self)\n\nclass Vote(models.Model):\n choice = models.ForeignKey(Choice, related_name='votes')\n"},"new_contents":{"kind":"string","value":"from django.db import models\n\n\nclass Question(models.Model):\n question_text = models.CharField(max_length=140)\n published_at = models.DateTimeField(auto_now_add=True)\n\n class Meta:\n get_latest_by = 'published_at'\n ordering = ('-published_at',)\n\n def __str__(self):\n return self.question_text\n\n\nclass Choice(models.Model):\n question = models.ForeignKey(Question, 
related_name='choices')\n choice_text = models.CharField(max_length=140)\n\n def __str__(self):\n return self.choice_text\n\n def vote(self):\n \"\"\"\n Create a vote on this choice.\n \"\"\"\n return Vote.objects.create(choice=self)\n\nclass Vote(models.Model):\n choice = models.ForeignKey(Choice, related_name='votes')\n"},"subject":{"kind":"string","value":"Order questions by published date"},"message":{"kind":"string","value":"Order questions by published date\n\nCloses #23\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"apiaryio/polls-api"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.db import models\n\n\nclass Question(models.Model):\n question_text = models.CharField(max_length=140)\n published_at = models.DateTimeField(auto_now_add=True)\n\n class Meta:\n get_latest_by = 'published_at'\n\n def __str__(self):\n return self.question_text\n\n\nclass Choice(models.Model):\n question = models.ForeignKey(Question, related_name='choices')\n choice_text = models.CharField(max_length=140)\n\n def __str__(self):\n return self.choice_text\n\n def vote(self):\n \"\"\"\n Create a vote on this choice.\n \"\"\"\n return Vote.objects.create(choice=self)\n\nclass Vote(models.Model):\n choice = models.ForeignKey(Choice, related_name='votes')\n\n## Instruction:\nOrder questions by published date\n\nCloses #23\n\n## Code After:\nfrom django.db import models\n\n\nclass Question(models.Model):\n question_text = models.CharField(max_length=140)\n published_at = models.DateTimeField(auto_now_add=True)\n\n class Meta:\n get_latest_by = 'published_at'\n ordering = ('-published_at',)\n\n def __str__(self):\n return self.question_text\n\n\nclass Choice(models.Model):\n question = models.ForeignKey(Question, related_name='choices')\n choice_text = models.CharField(max_length=140)\n\n def __str__(self):\n return self.choice_text\n\n def vote(self):\n 
\"\"\"\n Create a vote on this choice.\n \"\"\"\n return Vote.objects.create(choice=self)\n\nclass Vote(models.Model):\n choice = models.ForeignKey(Choice, related_name='votes')\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n class Meta:\n get_latest_by = 'published_at'\n ordering = ('-published_at',)\n\n def __str__(self):\n return self.question_text\n\n\n// ... rest of the code ..."}}},{"rowIdx":2176,"cells":{"commit":{"kind":"string","value":"0b53adc34259fedc23e42e7576517fb62f4cb33e"},"old_file":{"kind":"string","value":"base_contact/models/ir_model.py"},"new_file":{"kind":"string","value":"base_contact/models/ir_model.py"},"old_contents":{"kind":"string","value":"\nimport logging\nfrom openerp import api, models\n\n_logger = logging.getLogger(__name__)\n\n\nclass IrModel(models.Model):\n _inherit = \"ir.model\"\n\n @api.cr\n def _register_hook(self, cr):\n \"\"\"Only warn in installed instances.\"\"\"\n _logger.warning(\"This module is DEPRECATED. See README.\")\n return super(IrModel, self)._register_hook(cr)\n"},"new_contents":{"kind":"string","value":"\nimport logging\nfrom openerp import api, models\n\n_logger = logging.getLogger(__name__)\n\n\nclass IrModel(models.Model):\n _inherit = \"ir.model\"\n\n @api.cr\n def _register_hook(self, cr):\n \"\"\"Only warn in installed instances.\"\"\"\n _logger.info(\"WARNING: This module is DEPRECATED. 
See README.\")\n return super(IrModel, self)._register_hook(cr)\n"},"subject":{"kind":"string","value":"Downgrade to INFO, since runbots install this."},"message":{"kind":"string","value":"Downgrade to INFO, since runbots install this.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"open-synergy/partner-contact,acsone/partner-contact,diagramsoftware/partner-contact"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport logging\nfrom openerp import api, models\n\n_logger = logging.getLogger(__name__)\n\n\nclass IrModel(models.Model):\n _inherit = \"ir.model\"\n\n @api.cr\n def _register_hook(self, cr):\n \"\"\"Only warn in installed instances.\"\"\"\n _logger.warning(\"This module is DEPRECATED. See README.\")\n return super(IrModel, self)._register_hook(cr)\n\n## Instruction:\nDowngrade to INFO, since runbots install this.\n\n## Code After:\n\nimport logging\nfrom openerp import api, models\n\n_logger = logging.getLogger(__name__)\n\n\nclass IrModel(models.Model):\n _inherit = \"ir.model\"\n\n @api.cr\n def _register_hook(self, cr):\n \"\"\"Only warn in installed instances.\"\"\"\n _logger.info(\"WARNING: This module is DEPRECATED. See README.\")\n return super(IrModel, self)._register_hook(cr)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n @api.cr\n def _register_hook(self, cr):\n \"\"\"Only warn in installed instances.\"\"\"\n _logger.info(\"WARNING: This module is DEPRECATED. See README.\")\n return super(IrModel, self)._register_hook(cr)\n\n\n// ... rest of the code ..."}}},{"rowIdx":2177,"cells":{"commit":{"kind":"string","value":"cbc69077016885ebf2b481eebd2f11511c8184ce"},"old_file":{"kind":"string","value":"nbgrader/tests/apps/test_nbgrader.py"},"new_file":{"kind":"string","value":"nbgrader/tests/apps/test_nbgrader.py"},"old_contents":{"kind":"string","value":"import os\nimport sys\n\nfrom .. 
import run_nbgrader, run_command\nfrom .base import BaseTestApp\n\n\nclass TestNbGrader(BaseTestApp):\n\n def test_help(self):\n \"\"\"Does the help display without error?\"\"\"\n run_nbgrader([\"--help-all\"])\n\n def test_no_subapp(self):\n \"\"\"Is the help displayed when no subapp is given?\"\"\"\n run_nbgrader([], retcode=0)\n\n def test_check_version(self, capfd):\n \"\"\"Is the version the same regardless of how we run nbgrader?\"\"\"\n out1 = '\\n'.join(\n run_command([sys.executable, \"-m\", \"nbgrader\", \"--version\"]).splitlines()[-3:]\n ).strip()\n out2 = '\\n'.join(\n run_nbgrader([\"--version\"], stdout=True).splitlines()[-3:]\n ).strip()\n assert out1 == out2\n\n def test_logfile(self):\n # by default, there should be no logfile created\n files_before = set(os.listdir())\n run_nbgrader([])\n files_after = set(os.listdir())\n assert files_before == files_after\n\n # if we specify a logfile, it should get used\n run_nbgrader([\"--NbGrader.logfile=log.txt\"])\n assert os.path.exists(\"log.txt\")\n"},"new_contents":{"kind":"string","value":"import os\nimport sys\n\nfrom .. 
import run_nbgrader, run_command\nfrom .base import BaseTestApp\n\n\nclass TestNbGrader(BaseTestApp):\n\n def test_help(self):\n \"\"\"Does the help display without error?\"\"\"\n run_nbgrader([\"--help-all\"])\n\n def test_no_subapp(self):\n \"\"\"Is the help displayed when no subapp is given?\"\"\"\n run_nbgrader([], retcode=0)\n\n def test_check_version(self, capfd):\n \"\"\"Is the version the same regardless of how we run nbgrader?\"\"\"\n out1 = '\\n'.join(\n run_command([sys.executable, \"-m\", \"nbgrader\", \"--version\"]).splitlines()[-3:]\n ).strip()\n out2 = '\\n'.join(\n run_nbgrader([\"--version\"], stdout=True).splitlines()[-3:]\n ).strip()\n assert out1 == out2\n\n def test_logfile(self):\n # by default, there should be no logfile created\n cwd = os.getcwd()\n files_before = set(os.listdir(cwd))\n run_nbgrader([])\n files_after = set(os.listdir(cwd))\n assert files_before == files_after\n\n # if we specify a logfile, it should get used\n run_nbgrader([\"--NbGrader.logfile=log.txt\"])\n assert os.path.exists(\"log.txt\")\n"},"subject":{"kind":"string","value":"Include directory name for python 2 compatibility"},"message":{"kind":"string","value":"Include directory name for python 2 compatibility\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport sys\n\nfrom .. 
import run_nbgrader, run_command\nfrom .base import BaseTestApp\n\n\nclass TestNbGrader(BaseTestApp):\n\n def test_help(self):\n \"\"\"Does the help display without error?\"\"\"\n run_nbgrader([\"--help-all\"])\n\n def test_no_subapp(self):\n \"\"\"Is the help displayed when no subapp is given?\"\"\"\n run_nbgrader([], retcode=0)\n\n def test_check_version(self, capfd):\n \"\"\"Is the version the same regardless of how we run nbgrader?\"\"\"\n out1 = '\\n'.join(\n run_command([sys.executable, \"-m\", \"nbgrader\", \"--version\"]).splitlines()[-3:]\n ).strip()\n out2 = '\\n'.join(\n run_nbgrader([\"--version\"], stdout=True).splitlines()[-3:]\n ).strip()\n assert out1 == out2\n\n def test_logfile(self):\n # by default, there should be no logfile created\n files_before = set(os.listdir())\n run_nbgrader([])\n files_after = set(os.listdir())\n assert files_before == files_after\n\n # if we specify a logfile, it should get used\n run_nbgrader([\"--NbGrader.logfile=log.txt\"])\n assert os.path.exists(\"log.txt\")\n\n## Instruction:\nInclude directory name for python 2 compatibility\n\n## Code After:\nimport os\nimport sys\n\nfrom .. 
import run_nbgrader, run_command\nfrom .base import BaseTestApp\n\n\nclass TestNbGrader(BaseTestApp):\n\n def test_help(self):\n \"\"\"Does the help display without error?\"\"\"\n run_nbgrader([\"--help-all\"])\n\n def test_no_subapp(self):\n \"\"\"Is the help displayed when no subapp is given?\"\"\"\n run_nbgrader([], retcode=0)\n\n def test_check_version(self, capfd):\n \"\"\"Is the version the same regardless of how we run nbgrader?\"\"\"\n out1 = '\\n'.join(\n run_command([sys.executable, \"-m\", \"nbgrader\", \"--version\"]).splitlines()[-3:]\n ).strip()\n out2 = '\\n'.join(\n run_nbgrader([\"--version\"], stdout=True).splitlines()[-3:]\n ).strip()\n assert out1 == out2\n\n def test_logfile(self):\n # by default, there should be no logfile created\n cwd = os.getcwd()\n files_before = set(os.listdir(cwd))\n run_nbgrader([])\n files_after = set(os.listdir(cwd))\n assert files_before == files_after\n\n # if we specify a logfile, it should get used\n run_nbgrader([\"--NbGrader.logfile=log.txt\"])\n assert os.path.exists(\"log.txt\")\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n def test_logfile(self):\n # by default, there should be no logfile created\n cwd = os.getcwd()\n files_before = set(os.listdir(cwd))\n run_nbgrader([])\n files_after = set(os.listdir(cwd))\n assert files_before == files_after\n\n # if we specify a logfile, it should get used\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2178,"cells":{"commit":{"kind":"string","value":"0d572d60522ae0e80105330981a66bc541434b99"},"old_file":{"kind":"string","value":"rip/filter_operators.py"},"new_file":{"kind":"string","value":"rip/filter_operators.py"},"old_contents":{"kind":"string","value":"\nEQUALS = 'equals'\nGT = 'gt'\nLT = 'lt'\n\nOPERATOR_SEPARATOR = '__'\nREVERSE_ORDER = '-'\n\nALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1}\n\n\ndef split_to_field_and_filter_type(filter_name):\n filter_split = filter_name.split(OPERATOR_SEPARATOR)\n filter_type = filter_split[-1] if len(filter_split) > 0 else None\n\n if filter_type in ALL_OPERATORS:\n return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type\n else:\n return filter_name, None\n\n\ndef split_to_field_and_order_type(field_name_with_operator):\n if field_name_with_operator.startswith(REVERSE_ORDER):\n return field_name_with_operator[1:], REVERSE_ORDER\n else:\n return field_name_with_operator, None\n\n\ndef transform_to_list(val):\n if isinstance(val, (list, tuple)):\n return val\n else:\n return [val]"},"new_contents":{"kind":"string","value":"\nEQUALS = 'equals'\nGT = 'gt'\nLT = 'lt'\nIN = 'in'\n\nOPERATOR_SEPARATOR = '__'\nREVERSE_ORDER = '-'\n\nALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}\n\n\ndef split_to_field_and_filter_type(filter_name):\n filter_split = filter_name.split(OPERATOR_SEPARATOR)\n filter_type = filter_split[-1] if len(filter_split) > 0 else None\n\n if filter_type in ALL_OPERATORS:\n return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type\n else:\n return filter_name, None\n\n\ndef split_to_field_and_order_type(field_name_with_operator):\n if field_name_with_operator.startswith(REVERSE_ORDER):\n return field_name_with_operator[1:], REVERSE_ORDER\n else:\n return field_name_with_operator, None\n\n\ndef transform_to_list(val):\n if isinstance(val, (list, tuple)):\n return val\n else:\n return [val]"},"subject":{"kind":"string","value":"Support __in as operator for backwards 
comp"},"message":{"kind":"string","value":"Support __in as operator for backwards comp\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Aplopio/rip,Aplopio/django_rip"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nEQUALS = 'equals'\nGT = 'gt'\nLT = 'lt'\n\nOPERATOR_SEPARATOR = '__'\nREVERSE_ORDER = '-'\n\nALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1}\n\n\ndef split_to_field_and_filter_type(filter_name):\n filter_split = filter_name.split(OPERATOR_SEPARATOR)\n filter_type = filter_split[-1] if len(filter_split) > 0 else None\n\n if filter_type in ALL_OPERATORS:\n return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type\n else:\n return filter_name, None\n\n\ndef split_to_field_and_order_type(field_name_with_operator):\n if field_name_with_operator.startswith(REVERSE_ORDER):\n return field_name_with_operator[1:], REVERSE_ORDER\n else:\n return field_name_with_operator, None\n\n\ndef transform_to_list(val):\n if isinstance(val, (list, tuple)):\n return val\n else:\n return [val]\n## Instruction:\nSupport __in as operator for backwards comp\n\n## Code After:\n\nEQUALS = 'equals'\nGT = 'gt'\nLT = 'lt'\nIN = 'in'\n\nOPERATOR_SEPARATOR = '__'\nREVERSE_ORDER = '-'\n\nALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}\n\n\ndef split_to_field_and_filter_type(filter_name):\n filter_split = filter_name.split(OPERATOR_SEPARATOR)\n filter_type = filter_split[-1] if len(filter_split) > 0 else None\n\n if filter_type in ALL_OPERATORS:\n return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type\n else:\n return filter_name, None\n\n\ndef split_to_field_and_order_type(field_name_with_operator):\n if field_name_with_operator.startswith(REVERSE_ORDER):\n return field_name_with_operator[1:], REVERSE_ORDER\n else:\n return field_name_with_operator, None\n\n\ndef transform_to_list(val):\n if isinstance(val, (list, tuple)):\n return val\n else:\n return 
[val]"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nEQUALS = 'equals'\nGT = 'gt'\nLT = 'lt'\nIN = 'in'\n\nOPERATOR_SEPARATOR = '__'\nREVERSE_ORDER = '-'\n\nALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1}\n\n\ndef split_to_field_and_filter_type(filter_name):\n\n\n ... "}}},{"rowIdx":2179,"cells":{"commit":{"kind":"string","value":"179930370aabb17284a5de081eb6db42ab3d2c54"},"old_file":{"kind":"string","value":"android/app/src/main/java/com/funnyhatsoftware/spacedock/activity/DetailsActivity.java"},"new_file":{"kind":"string","value":"android/app/src/main/java/com/funnyhatsoftware/spacedock/activity/DetailsActivity.java"},"old_contents":{"kind":"string","value":"package com.funnyhatsoftware.spacedock.activity;\n\nimport android.content.Context;\nimport android.content.Intent;\nimport android.support.v4.app.Fragment;\n\nimport com.funnyhatsoftware.spacedock.fragment.DetailsFragment;\n\npublic class DetailsActivity extends SinglePaneActivity {\n private static final String EXTRA_TYPE = \"browsetype\";\n private static final String EXTRA_ITEM = \"displayitem\";\n\n public static Intent getIntent(Context context, String itemType, String itemId) {\n if (itemType == null || itemId == null) {\n throw new IllegalArgumentException();\n }\n\n Intent intent = new Intent(context, DetailsActivity.class);\n intent.putExtra(EXTRA_TYPE, itemType);\n intent.putExtra(EXTRA_ITEM, itemId);\n return intent;\n }\n\n public Fragment getFragment() {\n String itemType = getIntent().getStringExtra(EXTRA_TYPE);\n String itemId = getIntent().getStringExtra(EXTRA_ITEM);\n return DetailsFragment.newInstance(itemType, itemId);\n }\n}\n"},"new_contents":{"kind":"string","value":"package com.funnyhatsoftware.spacedock.activity;\n\nimport android.content.Context;\nimport android.content.Intent;\nimport android.support.v4.app.Fragment;\n\nimport com.funnyhatsoftware.spacedock.fragment.DetailsFragment;\nimport com.funnyhatsoftware.spacedock.holder.ExpansionHolder;\n\npublic class DetailsActivity 
extends SinglePaneActivity {\n private static final String EXTRA_TYPE = \"browsetype\";\n private static final String EXTRA_ITEM = \"displayitem\";\n\n public static Intent getIntent(Context context, String itemType, String itemId) {\n if (itemType == null || itemId == null) {\n throw new IllegalArgumentException();\n }\n\n if (itemType.equals(ExpansionHolder.TYPE_STRING)) {\n return ExpansionDetailsActivity.getIntent(context, itemId);\n }\n\n Intent intent = new Intent(context, DetailsActivity.class);\n intent.putExtra(EXTRA_TYPE, itemType);\n intent.putExtra(EXTRA_ITEM, itemId);\n return intent;\n }\n\n public Fragment getFragment() {\n String itemType = getIntent().getStringExtra(EXTRA_TYPE);\n String itemId = getIntent().getStringExtra(EXTRA_ITEM);\n return DetailsFragment.newInstance(itemType, itemId);\n }\n}\n"},"subject":{"kind":"string","value":"Fix expansion detail display on phones"},"message":{"kind":"string","value":"Fix expansion detail display on phones\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"tblackwe/spacedock,tblackwe/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,tblackwe/spacedock,spacedockapp/spacedock,tblackwe/spacedock,spacedockapp/spacedock,tblackwe/spacedock,tblackwe/spacedock"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.funnyhatsoftware.spacedock.activity;\n\nimport android.content.Context;\nimport android.content.Intent;\nimport android.support.v4.app.Fragment;\n\nimport com.funnyhatsoftware.spacedock.fragment.DetailsFragment;\n\npublic class DetailsActivity extends SinglePaneActivity {\n private static final String EXTRA_TYPE = \"browsetype\";\n private static final String EXTRA_ITEM = \"displayitem\";\n\n public static Intent getIntent(Context context, String itemType, String itemId) {\n if (itemType == null || itemId == null) {\n throw 
new IllegalArgumentException();\n }\n\n Intent intent = new Intent(context, DetailsActivity.class);\n intent.putExtra(EXTRA_TYPE, itemType);\n intent.putExtra(EXTRA_ITEM, itemId);\n return intent;\n }\n\n public Fragment getFragment() {\n String itemType = getIntent().getStringExtra(EXTRA_TYPE);\n String itemId = getIntent().getStringExtra(EXTRA_ITEM);\n return DetailsFragment.newInstance(itemType, itemId);\n }\n}\n\n## Instruction:\nFix expansion detail display on phones\n\n## Code After:\npackage com.funnyhatsoftware.spacedock.activity;\n\nimport android.content.Context;\nimport android.content.Intent;\nimport android.support.v4.app.Fragment;\n\nimport com.funnyhatsoftware.spacedock.fragment.DetailsFragment;\nimport com.funnyhatsoftware.spacedock.holder.ExpansionHolder;\n\npublic class DetailsActivity extends SinglePaneActivity {\n private static final String EXTRA_TYPE = \"browsetype\";\n private static final String EXTRA_ITEM = \"displayitem\";\n\n public static Intent getIntent(Context context, String itemType, String itemId) {\n if (itemType == null || itemId == null) {\n throw new IllegalArgumentException();\n }\n\n if (itemType.equals(ExpansionHolder.TYPE_STRING)) {\n return ExpansionDetailsActivity.getIntent(context, itemId);\n }\n\n Intent intent = new Intent(context, DetailsActivity.class);\n intent.putExtra(EXTRA_TYPE, itemType);\n intent.putExtra(EXTRA_ITEM, itemId);\n return intent;\n }\n\n public Fragment getFragment() {\n String itemType = getIntent().getStringExtra(EXTRA_TYPE);\n String itemId = getIntent().getStringExtra(EXTRA_ITEM);\n return DetailsFragment.newInstance(itemType, itemId);\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport android.support.v4.app.Fragment;\n\nimport com.funnyhatsoftware.spacedock.fragment.DetailsFragment;\nimport com.funnyhatsoftware.spacedock.holder.ExpansionHolder;\n\npublic class DetailsActivity extends SinglePaneActivity {\n private static final String EXTRA_TYPE = \"browsetype\";\n\n\n ... 
\n\n\n public static Intent getIntent(Context context, String itemType, String itemId) {\n if (itemType == null || itemId == null) {\n throw new IllegalArgumentException();\n }\n\n if (itemType.equals(ExpansionHolder.TYPE_STRING)) {\n return ExpansionDetailsActivity.getIntent(context, itemId);\n }\n\n Intent intent = new Intent(context, DetailsActivity.class);\n\n\n ... "}}},{"rowIdx":2180,"cells":{"commit":{"kind":"string","value":"e893a860f4a8ad9682f400507948ee20fce1c328"},"old_file":{"kind":"string","value":"healthcheck/contrib/django/status_endpoint/views.py"},"new_file":{"kind":"string","value":"healthcheck/contrib/django/status_endpoint/views.py"},"old_contents":{"kind":"string","value":"import json\n\nfrom django.conf import settings\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import HttpResponse, HttpResponseServerError\n\nfrom healthcheck.healthcheck import (\n DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)\n\n\n@require_http_methods(['GET'])\ndef status(request):\n checks = []\n\n if getattr(settings, 'STATUS_CHECK_DBS', True):\n checks.append(DjangoDBsHealthCheck())\n\n files_to_check = getattr(\n settings, 'STATUS_CHECK_FILES')\n if files_to_check:\n checks.append(\n FilesDontExistHealthCheck(\n files_to_check, check_id=\"quiesce file doesn't exist\"))\n\n ok, details = HealthChecker(checks)()\n\n if not ok:\n return HttpResponseServerError((json.dumps(details)))\n\n return HttpResponse(json.dumps(details))\n"},"new_contents":{"kind":"string","value":"import json\n\nfrom django.conf import settings\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import HttpResponse\n\nfrom healthcheck.healthcheck import (\n DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)\n\n\nclass JsonResponse(HttpResponse):\n def __init__(self, data, **kwargs):\n kwargs.setdefault('content_type', 'application/json')\n data = json.dumps(data)\n super(JsonResponse, 
self).__init__(content=data, **kwargs)\n\n\nclass JsonResponseServerError(JsonResponse):\n status_code = 500\n\n\n@require_http_methods(['GET'])\ndef status(request):\n checks = []\n\n if getattr(settings, 'STATUS_CHECK_DBS', True):\n checks.append(DjangoDBsHealthCheck())\n\n files_to_check = getattr(settings, 'STATUS_CHECK_FILES')\n if files_to_check:\n checks.append(FilesDontExistHealthCheck(\n files_to_check, check_id=\"quiesce file doesn't exist\"))\n\n ok, details = HealthChecker(checks)()\n\n if not ok:\n return JsonResponseServerError(json.dumps(details))\n\n return JsonResponse(details)\n"},"subject":{"kind":"string","value":"Fix content_type for JSON responses"},"message":{"kind":"string","value":"Fix content_type for JSON responses\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"yola/healthcheck"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\n\nfrom django.conf import settings\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import HttpResponse, HttpResponseServerError\n\nfrom healthcheck.healthcheck import (\n DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)\n\n\n@require_http_methods(['GET'])\ndef status(request):\n checks = []\n\n if getattr(settings, 'STATUS_CHECK_DBS', True):\n checks.append(DjangoDBsHealthCheck())\n\n files_to_check = getattr(\n settings, 'STATUS_CHECK_FILES')\n if files_to_check:\n checks.append(\n FilesDontExistHealthCheck(\n files_to_check, check_id=\"quiesce file doesn't exist\"))\n\n ok, details = HealthChecker(checks)()\n\n if not ok:\n return HttpResponseServerError((json.dumps(details)))\n\n return HttpResponse(json.dumps(details))\n\n## Instruction:\nFix content_type for JSON responses\n\n## Code After:\nimport json\n\nfrom django.conf import settings\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import 
HttpResponse\n\nfrom healthcheck.healthcheck import (\n DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)\n\n\nclass JsonResponse(HttpResponse):\n def __init__(self, data, **kwargs):\n kwargs.setdefault('content_type', 'application/json')\n data = json.dumps(data)\n super(JsonResponse, self).__init__(content=data, **kwargs)\n\n\nclass JsonResponseServerError(JsonResponse):\n status_code = 500\n\n\n@require_http_methods(['GET'])\ndef status(request):\n checks = []\n\n if getattr(settings, 'STATUS_CHECK_DBS', True):\n checks.append(DjangoDBsHealthCheck())\n\n files_to_check = getattr(settings, 'STATUS_CHECK_FILES')\n if files_to_check:\n checks.append(FilesDontExistHealthCheck(\n files_to_check, check_id=\"quiesce file doesn't exist\"))\n\n ok, details = HealthChecker(checks)()\n\n if not ok:\n return JsonResponseServerError(json.dumps(details))\n\n return JsonResponse(details)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\nfrom django.conf import settings\nfrom django.views.decorators.http import require_http_methods\nfrom django.http import HttpResponse\n\nfrom healthcheck.healthcheck import (\n DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)\n\n\nclass JsonResponse(HttpResponse):\n def __init__(self, data, **kwargs):\n kwargs.setdefault('content_type', 'application/json')\n data = json.dumps(data)\n super(JsonResponse, self).__init__(content=data, **kwargs)\n\n\nclass JsonResponseServerError(JsonResponse):\n status_code = 500\n\n\n@require_http_methods(['GET'])\n\n\n ... \n\n\n if getattr(settings, 'STATUS_CHECK_DBS', True):\n checks.append(DjangoDBsHealthCheck())\n\n files_to_check = getattr(settings, 'STATUS_CHECK_FILES')\n if files_to_check:\n checks.append(FilesDontExistHealthCheck(\n files_to_check, check_id=\"quiesce file doesn't exist\"))\n\n ok, details = HealthChecker(checks)()\n\n if not ok:\n return JsonResponseServerError(json.dumps(details))\n\n return JsonResponse(details)\n\n\n ... 
"}}},{"rowIdx":2181,"cells":{"commit":{"kind":"string","value":"c5946e378147f6d4d42c7a3e531388e6203f29e4"},"old_file":{"kind":"string","value":"fantasyStocks/static/stockCleaner.py"},"new_file":{"kind":"string","value":"fantasyStocks/static/stockCleaner.py"},"old_contents":{"kind":"string","value":"import json\nwith open(\"stocks.json\") as f:\n \n"},"new_contents":{"kind":"string","value":"from pprint import pprint\nimport json\nimport re\nREGEXP = re.compile(\"(?P[A-Z]{1,4}).*\")\nwith open(\"stocks.json\") as f:\n l = json.loads(f.read())\n out = []\n for i in l:\n if not \"^\" in i[\"symbol\"]:\n out.append(i)\n with open(\"newStocks.json\", \"w\") as w:\n w.write(json.dumps(out))\n"},"subject":{"kind":"string","value":"Write script to remove duplicates from stocks.json"},"message":{"kind":"string","value":"Write script to remove duplicates from stocks.json\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\nwith open(\"stocks.json\") as f:\n \n\n## Instruction:\nWrite script to remove duplicates from stocks.json\n\n## Code After:\nfrom pprint import pprint\nimport json\nimport re\nREGEXP = re.compile(\"(?P[A-Z]{1,4}).*\")\nwith open(\"stocks.json\") as f:\n l = json.loads(f.read())\n out = []\n for i in l:\n if not \"^\" in i[\"symbol\"]:\n out.append(i)\n with open(\"newStocks.json\", \"w\") as w:\n w.write(json.dumps(out))\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom pprint import pprint\nimport json\nimport re\nREGEXP = re.compile(\"(?P[A-Z]{1,4}).*\")\nwith open(\"stocks.json\") as f:\n l = json.loads(f.read())\n out = []\n for i in l:\n if not \"^\" in i[\"symbol\"]:\n out.append(i)\n with open(\"newStocks.json\", \"w\") as w:\n w.write(json.dumps(out))\n\n\n ... 
"}}},{"rowIdx":2182,"cells":{"commit":{"kind":"string","value":"98925a82dfb45a4c76496cd11af8d1483a678e6e"},"old_file":{"kind":"string","value":"sigh/views/api.py"},"new_file":{"kind":"string","value":"sigh/views/api.py"},"old_contents":{"kind":"string","value":"import json\nfrom functools import wraps\n\nfrom flask import Blueprint\nfrom flask import Response\n\nfrom ..models import Tag\n\n\napi_views = Blueprint('api', __name__, url_prefix='/api/')\n\n\ndef jsonify(func):\n @wraps(func)\n def _(*args, **kwargs):\n result = func(*args, **kwargs)\n return Response(json.dumps(result), mimetype='application/json')\n return _\n\n\n@api_views.route('tag/autocompletion/')\n@jsonify\ndef autocomplete_tag(q):\n tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()\n tags = [tag.to_dict('id_', 'display_name') for tag in tags]\n return tags\n"},"new_contents":{"kind":"string","value":"import json\nfrom functools import wraps\n\nfrom flask import Blueprint\nfrom flask import Response\n\nfrom ..models import Tag\nfrom ..models import User\n\n\napi_views = Blueprint('api', __name__, url_prefix='/api/')\n\n\ndef jsonify(func):\n @wraps(func)\n def _(*args, **kwargs):\n result = func(*args, **kwargs)\n return Response(json.dumps(result), mimetype='application/json')\n return _\n\n\n@api_views.route('tag/autocompletion/')\n@jsonify\ndef autocomplete_tag(q):\n tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()\n tags = [tag.to_dict('id_', 'display_name') for tag in tags]\n return tags\n\n\n@api_views.route('user/autocompletion/')\n@jsonify\ndef autocomplete_user(q):\n users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all()\n users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users]\n return users\n"},"subject":{"kind":"string","value":"Create a new API for User autocompletion"},"message":{"kind":"string","value":"Create a new API for User 
autocompletion\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"kxxoling/Programmer-Sign,kxxoling/Programmer-Sign,kxxoling/Programmer-Sign"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\nfrom functools import wraps\n\nfrom flask import Blueprint\nfrom flask import Response\n\nfrom ..models import Tag\n\n\napi_views = Blueprint('api', __name__, url_prefix='/api/')\n\n\ndef jsonify(func):\n @wraps(func)\n def _(*args, **kwargs):\n result = func(*args, **kwargs)\n return Response(json.dumps(result), mimetype='application/json')\n return _\n\n\n@api_views.route('tag/autocompletion/')\n@jsonify\ndef autocomplete_tag(q):\n tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()\n tags = [tag.to_dict('id_', 'display_name') for tag in tags]\n return tags\n\n## Instruction:\nCreate a new API for User autocompletion\n\n## Code After:\nimport json\nfrom functools import wraps\n\nfrom flask import Blueprint\nfrom flask import Response\n\nfrom ..models import Tag\nfrom ..models import User\n\n\napi_views = Blueprint('api', __name__, url_prefix='/api/')\n\n\ndef jsonify(func):\n @wraps(func)\n def _(*args, **kwargs):\n result = func(*args, **kwargs)\n return Response(json.dumps(result), mimetype='application/json')\n return _\n\n\n@api_views.route('tag/autocompletion/')\n@jsonify\ndef autocomplete_tag(q):\n tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()\n tags = [tag.to_dict('id_', 'display_name') for tag in tags]\n return tags\n\n\n@api_views.route('user/autocompletion/')\n@jsonify\ndef autocomplete_user(q):\n users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all()\n users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users]\n return users\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\nfrom flask import Response\n\nfrom ..models import Tag\nfrom ..models import User\n\n\napi_views = Blueprint('api', __name__, url_prefix='/api/')\n\n\n# ... modified code ... \n\n\n tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all()\n tags = [tag.to_dict('id_', 'display_name') for tag in tags]\n return tags\n\n\n@api_views.route('user/autocompletion/')\n@jsonify\ndef autocomplete_user(q):\n users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all()\n users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users]\n return users\n\n\n# ... rest of the code ..."}}},{"rowIdx":2183,"cells":{"commit":{"kind":"string","value":"d05c44ffd8bf5b61f9ab160323170cca42240346"},"old_file":{"kind":"string","value":"samples/copy/build.gradle.kts"},"new_file":{"kind":"string","value":"samples/copy/build.gradle.kts"},"old_contents":{"kind":"string","value":"import org.gradle.api.file.*\nimport org.gradle.api.tasks.*\nimport org.apache.tools.ant.filters.*\n\n//for including in the copy task\nval dataContent = copySpec {\n it.from(\"src/data\")\n it.include(\"*.data\")\n}\n\ntask(\"initConfig\") {\n\n from(\"src/main/config\").let { it as CopySpec }.apply {\n include(\"**/*.properties\")\n include(\"**/*.xml\")\n filter(\n \"tokens\" to mapOf(\"version\" to \"2.3.1\"))\n }\n\n from(\"src/main/languages\").let { it as CopySpec }.apply {\n rename(\"EN_US_(.*)\", \"$1\")\n }\n\n into(\"build/target/config\")\n exclude(\"**/*.bak\")\n includeEmptyDirs = false\n with(dataContent)\n}\n\ntask(\"clean\") {\n delete(buildDir)\n}\n"},"new_contents":{"kind":"string","value":"import org.gradle.api.tasks.*\nimport org.apache.tools.ant.filters.*\n\n//for including in the copy task\nval dataContent = copySpec {\n it.from(\"src/data\")\n it.include(\"*.data\")\n}\n\ntask(\"initConfig\") {\n\n from(\"src/main/config\") {\n it.include(\"**/*.properties\")\n it.include(\"**/*.xml\")\n it.filter(\n \"tokens\" to mapOf(\"version\" to 
\"2.3.1\"))\n }\n\n from(\"src/main/languages\") {\n it.rename(\"EN_US_(.*)\", \"$1\")\n }\n\n into(\"build/target/config\")\n exclude(\"**/*.bak\")\n includeEmptyDirs = false\n with(dataContent)\n}\n\ntask(\"clean\") {\n delete(buildDir)\n}\n"},"subject":{"kind":"string","value":"Revert \"Fix copy sample to work against latest Gradle API\""},"message":{"kind":"string","value":"Revert \"Fix copy sample to work against latest Gradle API\"\n\nThis reverts commit d49facf0025572e3a66987c48af4ed8914aeeb88 since the\nmotivating changes in the Gradle API were also reverted.\n"},"lang":{"kind":"string","value":"Kotlin"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle,blindpirate/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle,gradle/gradle"},"config":{"kind":"string","value":"kotlin"},"content":{"kind":"string","value":"## Code Before:\nimport org.gradle.api.file.*\nimport org.gradle.api.tasks.*\nimport org.apache.tools.ant.filters.*\n\n//for including in the copy task\nval dataContent = copySpec {\n it.from(\"src/data\")\n it.include(\"*.data\")\n}\n\ntask(\"initConfig\") {\n\n from(\"src/main/config\").let { it as CopySpec }.apply {\n include(\"**/*.properties\")\n include(\"**/*.xml\")\n filter(\n \"tokens\" to mapOf(\"version\" to \"2.3.1\"))\n }\n\n from(\"src/main/languages\").let { it as CopySpec }.apply {\n rename(\"EN_US_(.*)\", \"$1\")\n }\n\n into(\"build/target/config\")\n 
exclude(\"**/*.bak\")\n includeEmptyDirs = false\n with(dataContent)\n}\n\ntask(\"clean\") {\n delete(buildDir)\n}\n\n## Instruction:\nRevert \"Fix copy sample to work against latest Gradle API\"\n\nThis reverts commit d49facf0025572e3a66987c48af4ed8914aeeb88 since the\nmotivating changes in the Gradle API were also reverted.\n\n## Code After:\nimport org.gradle.api.tasks.*\nimport org.apache.tools.ant.filters.*\n\n//for including in the copy task\nval dataContent = copySpec {\n it.from(\"src/data\")\n it.include(\"*.data\")\n}\n\ntask(\"initConfig\") {\n\n from(\"src/main/config\") {\n it.include(\"**/*.properties\")\n it.include(\"**/*.xml\")\n it.filter(\n \"tokens\" to mapOf(\"version\" to \"2.3.1\"))\n }\n\n from(\"src/main/languages\") {\n it.rename(\"EN_US_(.*)\", \"$1\")\n }\n\n into(\"build/target/config\")\n exclude(\"**/*.bak\")\n includeEmptyDirs = false\n with(dataContent)\n}\n\ntask(\"clean\") {\n delete(buildDir)\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nimport org.gradle.api.tasks.*\nimport org.apache.tools.ant.filters.*\n\n\n\n# ... modified code ... \n\n\n\ntask(\"initConfig\") {\n\n from(\"src/main/config\") {\n it.include(\"**/*.properties\")\n it.include(\"**/*.xml\")\n it.filter(\n \"tokens\" to mapOf(\"version\" to \"2.3.1\"))\n }\n\n from(\"src/main/languages\") {\n it.rename(\"EN_US_(.*)\", \"$1\")\n }\n\n into(\"build/target/config\")\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2184,"cells":{"commit":{"kind":"string","value":"a2c92c0be31e1d7a31625878e7bc68e23930224c"},"old_file":{"kind":"string","value":"loop.py"},"new_file":{"kind":"string","value":"loop.py"},"old_contents":{"kind":"string","value":"\nimport speech\nimport sys\n\ninputs = [\"hi\", \"foo\", \"lemon\", \"hello world\"]\noutput = []\ncurrent_run = []\n\ndef callback(phrase, listener):\n speech.say(phrase)\n if phrase == \"turn off\":\n speech.say(\"Goodbye.\")\n listener.stoplistening()\n sys.exit()\n\nprint \"Anything you type, speech will say back.\"\nprint \"Anything you say, speech will print out.\"\nprint \"Say or type 'turn off' to quit.\"\nprint\n\nlistener = speech.listenforanything(callback)\n\nwhile listener.islistening():\n for i in range(0, len(inputs)):\n speech.say(str(inputs[i]))\n\n text = raw_input(\"> \")\n if text == \"turn off\":\n listener.stoplistening()\n sys.exit()\n else:\n speech.say(text)\n"},"new_contents":{"kind":"string","value":"\nimport speech\nimport sys\nimport time\n\nlemon = \"lemon\"\noutput = []\ncurrent_run = []\n\nwaiting = False\nhasDetect = False\n\nprint \"Say something.\"\n\ndef callback(phrase, listener):\n speech.say(phrase)\n print phrase\n lemon = str(phrase)\n print lemon\n hasDetect = True\n waiting = False\n\nlistener = speech.listenforanything(callback)\n\nwhile listener.islistening():\n if not waiting and not hasDetect:\n waiting = True\n speech.say(lemon)\n"},"subject":{"kind":"string","value":"Change lemon if speech is detected"},"message":{"kind":"string","value":"Change lemon if speech is detected\n\nIf speech is detected, change lemon to whatever was detected. 
Also print\nit.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"powderblock/SpeechLooper"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport speech\nimport sys\n\ninputs = [\"hi\", \"foo\", \"lemon\", \"hello world\"]\noutput = []\ncurrent_run = []\n\ndef callback(phrase, listener):\n speech.say(phrase)\n if phrase == \"turn off\":\n speech.say(\"Goodbye.\")\n listener.stoplistening()\n sys.exit()\n\nprint \"Anything you type, speech will say back.\"\nprint \"Anything you say, speech will print out.\"\nprint \"Say or type 'turn off' to quit.\"\nprint\n\nlistener = speech.listenforanything(callback)\n\nwhile listener.islistening():\n for i in range(0, len(inputs)):\n speech.say(str(inputs[i]))\n\n text = raw_input(\"> \")\n if text == \"turn off\":\n listener.stoplistening()\n sys.exit()\n else:\n speech.say(text)\n\n## Instruction:\nChange lemon if speech is detected\n\nIf speech is detected, change lemon to whatever was detected. Also print\nit.\n\n## Code After:\n\nimport speech\nimport sys\nimport time\n\nlemon = \"lemon\"\noutput = []\ncurrent_run = []\n\nwaiting = False\nhasDetect = False\n\nprint \"Say something.\"\n\ndef callback(phrase, listener):\n speech.say(phrase)\n print phrase\n lemon = str(phrase)\n print lemon\n hasDetect = True\n waiting = False\n\nlistener = speech.listenforanything(callback)\n\nwhile listener.islistening():\n if not waiting and not hasDetect:\n waiting = True\n speech.say(lemon)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n\nimport speech\nimport sys\nimport time\n\nlemon = \"lemon\"\noutput = []\ncurrent_run = []\n\nwaiting = False\nhasDetect = False\n\nprint \"Say something.\"\n\ndef callback(phrase, listener):\n speech.say(phrase)\n print phrase\n lemon = str(phrase)\n print lemon\n hasDetect = True\n waiting = False\n\nlistener = speech.listenforanything(callback)\n\nwhile listener.islistening():\n if not waiting and not hasDetect:\n waiting = True\n speech.say(lemon)\n\n\n# ... rest of the code ..."}}},{"rowIdx":2185,"cells":{"commit":{"kind":"string","value":"b8ccf5a8022a002571a40900e6f725a0c8b0d860"},"old_file":{"kind":"string","value":"src/udon2xml.c"},"new_file":{"kind":"string","value":"src/udon2xml.c"},"old_contents":{"kind":"string","value":"\nint main (int argc, char *argv[]) {\n int i;\n int found = 0;\n pstate *state = init_from_file(\"../sjson-examples/big.txt\");\n for(i=0; i<10000; i++) {\n found += parse(state);\n reset_state(state);\n }\n free_state(state);\n printf(\"%d\\n\", found);\n}\n\n"},"new_contents":{"kind":"string","value":"\nint main (int argc, char *argv[]) {\n int i;\n int found = 0;\n if(argc < 2) return 1;\n pstate *state = init_from_file(argv[1]);\n for(i=0; i<10000; i++) {\n found += parse(state);\n reset_state(state);\n }\n free_state(state);\n printf(\"%d\\n\", found);\n}\n\n"},"subject":{"kind":"string","value":"Remove hardcoded file path. Still awkward and not generally usable, but _more_ general."},"message":{"kind":"string","value":"Remove hardcoded file path. 
Still awkward and not generally usable, but _more_\ngeneral.\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"josephwecker/udon-c,josephwecker/udon-c,josephwecker/udon-c,josephwecker/udon-c"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nint main (int argc, char *argv[]) {\n int i;\n int found = 0;\n pstate *state = init_from_file(\"../sjson-examples/big.txt\");\n for(i=0; i<10000; i++) {\n found += parse(state);\n reset_state(state);\n }\n free_state(state);\n printf(\"%d\\n\", found);\n}\n\n\n## Instruction:\nRemove hardcoded file path. Still awkward and not generally usable, but _more_\ngeneral.\n\n## Code After:\n\nint main (int argc, char *argv[]) {\n int i;\n int found = 0;\n if(argc < 2) return 1;\n pstate *state = init_from_file(argv[1]);\n for(i=0; i<10000; i++) {\n found += parse(state);\n reset_state(state);\n }\n free_state(state);\n printf(\"%d\\n\", found);\n}\n\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nint main (int argc, char *argv[]) {\n int i;\n int found = 0;\n if(argc < 2) return 1;\n pstate *state = init_from_file(argv[1]);\n for(i=0; i<10000; i++) {\n found += parse(state);\n reset_state(state);\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2186,"cells":{"commit":{"kind":"string","value":"c5d2682b8fa25e5234b8452c861e609225e34c6f"},"old_file":{"kind":"string","value":"src/test/java/guitests/RedoCommandTest.java"},"new_file":{"kind":"string","value":"src/test/java/guitests/RedoCommandTest.java"},"old_contents":{"kind":"string","value":"package guitests;\n\npublic class RedoCommandTest extends TaskManagerGuiTest {\n\n}\n"},"new_contents":{"kind":"string","value":"package guitests;\n\nimport org.junit.Test;\n\nimport seedu.address.logic.commands.UndoCommand;\nimport seedu.address.testutil.TestTask;\nimport seedu.address.testutil.TestUtil;\n\npublic class RedoCommandTest extends TaskManagerGuiTest {\n\n TestTask[] expectedList = td.getTypicalTasks();\n TestTask[] currentList = expectedList;\n\n\n public void redo() {\n redoAdd();\n }\n\n /**\n * Tries to redo an add command\n */\n @Test\n public void redoAdd() {\n TestTask taskToAdd = td.alice;\n commandBox.runCommand(taskToAdd.getAddCommand());\n commandBox.runCommand(UndoCommand.COMMAND_WORD);\n expectedList = TestUtil.addTasksToList(expectedList, taskToAdd);\n assertRedoSuccess(currentList, expectedList);\n }\n\n}\n"},"subject":{"kind":"string","value":"Test case for redo add command"},"message":{"kind":"string","value":"Test case for redo add command\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"CS2103JAN2017-W13-B2/main,CS2103JAN2017-W13-B2/main"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage guitests;\n\npublic class RedoCommandTest extends TaskManagerGuiTest {\n\n}\n\n## Instruction:\nTest case for redo add command\n\n## Code After:\npackage guitests;\n\nimport org.junit.Test;\n\nimport seedu.address.logic.commands.UndoCommand;\nimport seedu.address.testutil.TestTask;\nimport seedu.address.testutil.TestUtil;\n\npublic class RedoCommandTest extends TaskManagerGuiTest {\n\n TestTask[] 
expectedList = td.getTypicalTasks();\n TestTask[] currentList = expectedList;\n\n\n public void redo() {\n redoAdd();\n }\n\n /**\n * Tries to redo an add command\n */\n @Test\n public void redoAdd() {\n TestTask taskToAdd = td.alice;\n commandBox.runCommand(taskToAdd.getAddCommand());\n commandBox.runCommand(UndoCommand.COMMAND_WORD);\n expectedList = TestUtil.addTasksToList(expectedList, taskToAdd);\n assertRedoSuccess(currentList, expectedList);\n }\n\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npackage guitests;\n\nimport org.junit.Test;\n\nimport seedu.address.logic.commands.UndoCommand;\nimport seedu.address.testutil.TestTask;\nimport seedu.address.testutil.TestUtil;\n\npublic class RedoCommandTest extends TaskManagerGuiTest {\n\n TestTask[] expectedList = td.getTypicalTasks();\n TestTask[] currentList = expectedList;\n\n\n public void redo() {\n redoAdd();\n }\n\n /**\n * Tries to redo an add command\n */\n @Test\n public void redoAdd() {\n TestTask taskToAdd = td.alice;\n commandBox.runCommand(taskToAdd.getAddCommand());\n commandBox.runCommand(UndoCommand.COMMAND_WORD);\n expectedList = TestUtil.addTasksToList(expectedList, taskToAdd);\n assertRedoSuccess(currentList, expectedList);\n }\n\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":2187,"cells":{"commit":{"kind":"string","value":"b89e210f95b8f41efa8019ee66d6449b7242d56f"},"old_file":{"kind":"string","value":"tikplay/audio.py"},"new_file":{"kind":"string","value":"tikplay/audio.py"},"old_contents":{"kind":"string","value":"import json\nimport logging\nimport pysoundcard\nimport pysoundfile\nfrom tikplay.database import interface\n\n\nclass API():\n \"\"\" Implements the audio parsing interface for tikplay.\n\n Parses song metadata, handles database updating, and pushes the audio to soundcard\n\n Also implements basic song metadata fetching from the database\n \"\"\"\n def __init__(self, di=interface.DatabaseInterface):\n self.di = di()\n self.logger = logging.getLogger('AudioAPI')\n\n def play(self, song_hash):\n \"\"\" Play a song or add it to queue if a song is already playing\n\n Keyword arguments:\n song_hash: ...\n\n Return: true if started playing, false if added to queue\n \"\"\"\n soundcard = True\n for dev in list(pysoundcard.devices()):\n if '(hw:0,0)' in dev['name']:\n soundcard = dev\n break\n\n stream = pysoundcard.Stream(output_device=soundcard)\n soundfile = pysoundfile.SoundFile(song_hash)\n channels = soundfile.channels\n sample_rate = soundfile.sample_rate\n stream.output_channels = channels\n stream.start()\n stream.write(soundfile[:])\n stream.end()\n\n def now_playing(self, queue_length=1):\n \"\"\" Shows the now playing or the queue if queue_length is defined\n\n Keyword arguments:\n queue_length (optional): integer stating the length of queue to return. 
Default: 1.\n\n Return: the song that is now playing in the format\n (\"Artist - Title\"[, \"Artist - Title\", ...]) or None if empty\n \"\"\"\n return None\n"},"new_contents":{"kind":"string","value":"import json\nimport logging\nfrom pyglet import media\nfrom tikplay.database import interface\n\n\nclass API():\n \"\"\" Implements the audio parsing interface for tikplay.\n\n Parses song metadata, handles database updating, and pushes the audio to soundcard\n\n Also implements basic song metadata fetching from the database\n \"\"\"\n def __init__(self, di=interface.DatabaseInterface):\n self.player = media.Player()\n self.di = di()\n self.logger = logging.getLogger('AudioAPI')\n\n def play(self, song_hash):\n \"\"\" Play a song or add it to queue if a song is already playing\n\n Keyword arguments:\n song_hash: ...\n\n Return: true if started playing, false if added to queue\n \"\"\"\n # if cache: load audio metadata from cache\n # else: check that song_hash is actually a filename for an existing file\n\n audio_file = media.load(song_hash)\n self.player.queue(audio_file)\n if not self.player.playing:\n self.player.play()\n\n def next(self):\n self.player.next_source()\n\n def pause(self):\n self.player.pause()\n\n def resume(self):\n self.player.resume()\n\n def kill(self):\n while self.player.playing:\n self.player.next_source()\n\n def now_playing(self, queue_length=1):\n \"\"\" Shows the now playing or the queue if queue_length is defined\n\n Keyword arguments:\n queue_length (optional): integer stating the length of queue to return. Default: 1.\n\n Return: the song that is now playing in the format\n [(Artist, Title), (Artist, Title), ...) 
or None if empty\n \"\"\"\n src = self.player.source\n\n return [(src.info.author, src.info.title)]\n"},"subject":{"kind":"string","value":"Change pysoundcard and pysoundfile to pyglet"},"message":{"kind":"string","value":"Change pysoundcard and pysoundfile to pyglet\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\nimport logging\nimport pysoundcard\nimport pysoundfile\nfrom tikplay.database import interface\n\n\nclass API():\n \"\"\" Implements the audio parsing interface for tikplay.\n\n Parses song metadata, handles database updating, and pushes the audio to soundcard\n\n Also implements basic song metadata fetching from the database\n \"\"\"\n def __init__(self, di=interface.DatabaseInterface):\n self.di = di()\n self.logger = logging.getLogger('AudioAPI')\n\n def play(self, song_hash):\n \"\"\" Play a song or add it to queue if a song is already playing\n\n Keyword arguments:\n song_hash: ...\n\n Return: true if started playing, false if added to queue\n \"\"\"\n soundcard = True\n for dev in list(pysoundcard.devices()):\n if '(hw:0,0)' in dev['name']:\n soundcard = dev\n break\n\n stream = pysoundcard.Stream(output_device=soundcard)\n soundfile = pysoundfile.SoundFile(song_hash)\n channels = soundfile.channels\n sample_rate = soundfile.sample_rate\n stream.output_channels = channels\n stream.start()\n stream.write(soundfile[:])\n stream.end()\n\n def now_playing(self, queue_length=1):\n \"\"\" Shows the now playing or the queue if queue_length is defined\n\n Keyword arguments:\n queue_length (optional): integer stating the length of queue to return. 
Default: 1.\n\n Return: the song that is now playing in the format\n (\"Artist - Title\"[, \"Artist - Title\", ...]) or None if empty\n \"\"\"\n return None\n\n## Instruction:\nChange pysoundcard and pysoundfile to pyglet\n\n## Code After:\nimport json\nimport logging\nfrom pyglet import media\nfrom tikplay.database import interface\n\n\nclass API():\n \"\"\" Implements the audio parsing interface for tikplay.\n\n Parses song metadata, handles database updating, and pushes the audio to soundcard\n\n Also implements basic song metadata fetching from the database\n \"\"\"\n def __init__(self, di=interface.DatabaseInterface):\n self.player = media.Player()\n self.di = di()\n self.logger = logging.getLogger('AudioAPI')\n\n def play(self, song_hash):\n \"\"\" Play a song or add it to queue if a song is already playing\n\n Keyword arguments:\n song_hash: ...\n\n Return: true if started playing, false if added to queue\n \"\"\"\n # if cache: load audio metadata from cache\n # else: check that song_hash is actually a filename for an existing file\n\n audio_file = media.load(song_hash)\n self.player.queue(audio_file)\n if not self.player.playing:\n self.player.play()\n\n def next(self):\n self.player.next_source()\n\n def pause(self):\n self.player.pause()\n\n def resume(self):\n self.player.resume()\n\n def kill(self):\n while self.player.playing:\n self.player.next_source()\n\n def now_playing(self, queue_length=1):\n \"\"\" Shows the now playing or the queue if queue_length is defined\n\n Keyword arguments:\n queue_length (optional): integer stating the length of queue to return. Default: 1.\n\n Return: the song that is now playing in the format\n [(Artist, Title), (Artist, Title), ...) or None if empty\n \"\"\"\n src = self.player.source\n\n return [(src.info.author, src.info.title)]\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport json\nimport logging\nfrom pyglet import media\nfrom tikplay.database import interface\n\n\n\n\n// ... 
modified code ... \n\n\n Also implements basic song metadata fetching from the database\n \"\"\"\n def __init__(self, di=interface.DatabaseInterface):\n self.player = media.Player()\n self.di = di()\n self.logger = logging.getLogger('AudioAPI')\n\n\n\n ... \n\n\n\n Return: true if started playing, false if added to queue\n \"\"\"\n # if cache: load audio metadata from cache\n # else: check that song_hash is actually a filename for an existing file\n\n audio_file = media.load(song_hash)\n self.player.queue(audio_file)\n if not self.player.playing:\n self.player.play()\n\n def next(self):\n self.player.next_source()\n\n def pause(self):\n self.player.pause()\n\n def resume(self):\n self.player.resume()\n\n def kill(self):\n while self.player.playing:\n self.player.next_source()\n\n def now_playing(self, queue_length=1):\n \"\"\" Shows the now playing or the queue if queue_length is defined\n\n\n ... \n\n\n queue_length (optional): integer stating the length of queue to return. Default: 1.\n\n Return: the song that is now playing in the format\n [(Artist, Title), (Artist, Title), ...) or None if empty\n \"\"\"\n src = self.player.source\n\n return [(src.info.author, src.info.title)]\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2188,"cells":{"commit":{"kind":"string","value":"fd1f5bb3bf922fcfd5afdb5d6a0faced6eb995b3"},"old_file":{"kind":"string","value":"include/Genes/Pawn_Structure_Gene.h"},"new_file":{"kind":"string","value":"include/Genes/Pawn_Structure_Gene.h"},"old_contents":{"kind":"string","value":"\nclass Board;\n\nclass Pawn_Structure_Gene : public Clonable_Gene\n{\n public:\n Pawn_Structure_Gene() noexcept;\n std::string name() const noexcept override;\n double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override;\n\n private:\n double opening_guarded_by_pawn = 1.0;\n double opening_guarded_by_pawn_in_one_move = 1.0;\n double opening_guarded_by_piece = 1.0;\n\n double endgame_guarded_by_pawn = 1.0;\n double endgame_guarded_by_pawn_in_one_move = 1.0;\n double endgame_guarded_by_piece = 1.0;\n\n void gene_specific_mutation() noexcept override;\n void adjust_properties(std::map& properties) const noexcept override;\n void load_gene_properties(const std::map& properties) override;\n void normalize_guard_scores() noexcept;\n};\n\n#endif // PAWN_STRUCTURE_GENE_H\n"},"new_contents":{"kind":"string","value":"\nclass Board;\n\n//! 
\\brief A gene to evaluate how well pawns are protected.\nclass Pawn_Structure_Gene : public Clonable_Gene\n{\n public:\n Pawn_Structure_Gene() noexcept;\n std::string name() const noexcept override;\n\n private:\n double opening_guarded_by_pawn = 1.0;\n double opening_guarded_by_pawn_in_one_move = 1.0;\n double opening_guarded_by_piece = 1.0;\n\n double endgame_guarded_by_pawn = 1.0;\n double endgame_guarded_by_pawn_in_one_move = 1.0;\n double endgame_guarded_by_piece = 1.0;\n\n double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override;\n void gene_specific_mutation() noexcept override;\n void adjust_properties(std::map& properties) const noexcept override;\n void load_gene_properties(const std::map& properties) override;\n void normalize_guard_scores() noexcept;\n};\n\n#endif // PAWN_STRUCTURE_GENE_H\n"},"subject":{"kind":"string","value":"Fix public/private members and doxygen comments"},"message":{"kind":"string","value":"Fix public/private members and doxygen comments\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nclass Board;\n\nclass Pawn_Structure_Gene : public Clonable_Gene\n{\n public:\n Pawn_Structure_Gene() noexcept;\n std::string name() const noexcept override;\n double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override;\n\n private:\n double opening_guarded_by_pawn = 1.0;\n double opening_guarded_by_pawn_in_one_move = 1.0;\n double opening_guarded_by_piece = 1.0;\n\n double endgame_guarded_by_pawn = 1.0;\n double endgame_guarded_by_pawn_in_one_move = 1.0;\n double endgame_guarded_by_piece = 1.0;\n\n void gene_specific_mutation() noexcept override;\n void 
adjust_properties(std::map& properties) const noexcept override;\n void load_gene_properties(const std::map& properties) override;\n void normalize_guard_scores() noexcept;\n};\n\n#endif // PAWN_STRUCTURE_GENE_H\n\n## Instruction:\nFix public/private members and doxygen comments\n\n## Code After:\n\nclass Board;\n\n//! \\brief A gene to evaluate how well pawns are protected.\nclass Pawn_Structure_Gene : public Clonable_Gene\n{\n public:\n Pawn_Structure_Gene() noexcept;\n std::string name() const noexcept override;\n\n private:\n double opening_guarded_by_pawn = 1.0;\n double opening_guarded_by_pawn_in_one_move = 1.0;\n double opening_guarded_by_piece = 1.0;\n\n double endgame_guarded_by_pawn = 1.0;\n double endgame_guarded_by_pawn_in_one_move = 1.0;\n double endgame_guarded_by_piece = 1.0;\n\n double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override;\n void gene_specific_mutation() noexcept override;\n void adjust_properties(std::map& properties) const noexcept override;\n void load_gene_properties(const std::map& properties) override;\n void normalize_guard_scores() noexcept;\n};\n\n#endif // PAWN_STRUCTURE_GENE_H\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\nclass Board;\n\n//! \\brief A gene to evaluate how well pawns are protected.\nclass Pawn_Structure_Gene : public Clonable_Gene\n{\n public:\n Pawn_Structure_Gene() noexcept;\n std::string name() const noexcept override;\n\n private:\n double opening_guarded_by_pawn = 1.0;\n\n\n ... \n\n\n double endgame_guarded_by_pawn_in_one_move = 1.0;\n double endgame_guarded_by_piece = 1.0;\n\n double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override;\n void gene_specific_mutation() noexcept override;\n void adjust_properties(std::map& properties) const noexcept override;\n void load_gene_properties(const std::map& properties) override;\n\n\n ... 
"}}},{"rowIdx":2189,"cells":{"commit":{"kind":"string","value":"4b03f4e9613e875794e7cc90d5fc6657ded2672d"},"old_file":{"kind":"string","value":"src/main/java/org/odindelrio/dddaopentitybehaviour/domain/Follower.java"},"new_file":{"kind":"string","value":"src/main/java/org/odindelrio/dddaopentitybehaviour/domain/Follower.java"},"old_contents":{"kind":"string","value":"package org.odindelrio.dddaopentitybehaviour.domain;\n\nimport org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException;\n\npublic class Follower {\n private final String followerId;\n public FollowersRepository followersRepository;\n\n public Follower(String followerId) {\n this.followerId = followerId;\n }\n\n public String getFollowerId() {\n return followerId;\n }\n\n public void follow(Followable followable) {\n if (followable.getFollowableId().equals(followerId)) {\n throw new FollowerCantFollowHimSelfException();\n }\n\n System.out.println(\"INSIDE Follower.follow() method\");\n\n this.followersRepository.persistFollower(this, followable);\n }\n}\n"},"new_contents":{"kind":"string","value":"package org.odindelrio.dddaopentitybehaviour.domain;\n\nimport org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException;\n\npublic class Follower {\n private final String followerId;\n public transient FollowersRepository followersRepository;\n\n public Follower(String followerId) {\n this.followerId = followerId;\n }\n\n public String getFollowerId() {\n return followerId;\n }\n\n public void follow(Followable followable) {\n if (followable.getFollowableId().equals(followerId)) {\n throw new FollowerCantFollowHimSelfException();\n }\n\n System.out.println(\"INSIDE Follower.follow() method\");\n\n this.followersRepository.persistFollower(this, followable);\n }\n}\n"},"subject":{"kind":"string","value":"Make entity dependency transient, telling serializers that repository shouldn't be serialized."},"message":{"kind":"string","value":"Make entity 
dependency transient, telling serializers that repository shouldn't be serialized.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"odin-delrio/aop-for-entity-behaviour,odin-delrio/aop-for-entity-behaviour"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.odindelrio.dddaopentitybehaviour.domain;\n\nimport org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException;\n\npublic class Follower {\n private final String followerId;\n public FollowersRepository followersRepository;\n\n public Follower(String followerId) {\n this.followerId = followerId;\n }\n\n public String getFollowerId() {\n return followerId;\n }\n\n public void follow(Followable followable) {\n if (followable.getFollowableId().equals(followerId)) {\n throw new FollowerCantFollowHimSelfException();\n }\n\n System.out.println(\"INSIDE Follower.follow() method\");\n\n this.followersRepository.persistFollower(this, followable);\n }\n}\n\n## Instruction:\nMake entity dependency transient, telling serializers that repository shouldn't be serialized.\n\n## Code After:\npackage org.odindelrio.dddaopentitybehaviour.domain;\n\nimport org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException;\n\npublic class Follower {\n private final String followerId;\n public transient FollowersRepository followersRepository;\n\n public Follower(String followerId) {\n this.followerId = followerId;\n }\n\n public String getFollowerId() {\n return followerId;\n }\n\n public void follow(Followable followable) {\n if (followable.getFollowableId().equals(followerId)) {\n throw new FollowerCantFollowHimSelfException();\n }\n\n System.out.println(\"INSIDE Follower.follow() method\");\n\n this.followersRepository.persistFollower(this, followable);\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\n\npublic class Follower {\n private final String followerId;\n public transient FollowersRepository followersRepository;\n\n public Follower(String followerId) {\n this.followerId = followerId;\n\n\n// ... rest of the code ..."}}},{"rowIdx":2190,"cells":{"commit":{"kind":"string","value":"15c58fb05a9bfb06b87d8d00a1b26d50ee68c1f7"},"old_file":{"kind":"string","value":"django/publicmapping/redistricting/management/commands/makelanguagefiles.py"},"new_file":{"kind":"string","value":"django/publicmapping/redistricting/management/commands/makelanguagefiles.py"},"old_contents":{"kind":"string","value":"from django.core.management.base import BaseCommand\nfrom redistricting.utils import *\n\nclass Command(BaseCommand):\n \"\"\"\n This command prints creates and compiles language message files\n \"\"\"\n args = None\n help = 'Create and compile language message files'\n\n def handle(self, *args, **options):\n \"\"\"\n Create and compile language message files\n \"\"\"\n # Make messages for each language defined in settings\n for language in settings.LANGUAGES:\n management.call_command('makemessages', locale=language[0], interactive=False)\n\n # Compile all message files\n management.call_command('compilemessages', interactive=False)\n"},"new_contents":{"kind":"string","value":"from django.core.management.base import BaseCommand\nfrom redistricting.utils import *\n\nclass Command(BaseCommand):\n \"\"\"\n This command prints creates and compiles language message files\n \"\"\"\n args = None\n help = 'Create and compile language message files'\n\n def handle(self, *args, **options):\n \"\"\"\n Create and compile language message files\n \"\"\"\n # Make messages for each language defined in settings\n for language in settings.LANGUAGES:\n # For django templates\n management.call_command('makemessages', locale=language[0], interactive=False)\n\n # For javascript files\n management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False)\n\n 
# Compile all message files\n management.call_command('compilemessages', interactive=False)\n"},"subject":{"kind":"string","value":"Add creation of js message files to management command"},"message":{"kind":"string","value":"Add creation of js message files to management command\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.core.management.base import BaseCommand\nfrom redistricting.utils import *\n\nclass Command(BaseCommand):\n \"\"\"\n This command prints creates and compiles language message files\n \"\"\"\n args = None\n help = 'Create and compile language message files'\n\n def handle(self, *args, **options):\n \"\"\"\n Create and compile language message files\n \"\"\"\n # Make messages for each language defined in settings\n for language in settings.LANGUAGES:\n management.call_command('makemessages', locale=language[0], interactive=False)\n\n # Compile all message files\n management.call_command('compilemessages', interactive=False)\n\n## Instruction:\nAdd creation of js message files to management command\n\n## Code After:\nfrom django.core.management.base import BaseCommand\nfrom redistricting.utils import *\n\nclass Command(BaseCommand):\n \"\"\"\n This command prints creates and compiles language message files\n \"\"\"\n args = None\n help = 'Create and compile language message files'\n\n def handle(self, *args, **options):\n \"\"\"\n Create and compile language message files\n \"\"\"\n # Make messages for each language defined in settings\n for language in settings.LANGUAGES:\n # For django templates\n management.call_command('makemessages', locale=language[0], interactive=False)\n\n # For javascript files\n 
management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False)\n\n # Compile all message files\n management.call_command('compilemessages', interactive=False)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n \"\"\"\n # Make messages for each language defined in settings\n for language in settings.LANGUAGES:\n # For django templates\n management.call_command('makemessages', locale=language[0], interactive=False)\n\n # For javascript files\n management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False)\n\n # Compile all message files\n management.call_command('compilemessages', interactive=False)\n\n\n# ... rest of the code ..."}}},{"rowIdx":2191,"cells":{"commit":{"kind":"string","value":"4f4ba39bf2d270ef1cb34afe1a5ebe7816d448b7"},"old_file":{"kind":"string","value":"manage.py"},"new_file":{"kind":"string","value":"manage.py"},"old_contents":{"kind":"string","value":"from werkzeug import script\n\n\ndef make_app():\n from cadorsfeed.application import CadorsFeed\n return CadorsFeed()\n\n\ndef make_shell():\n from cadorsfeed import utils\n application = make_app()\n return locals()\n\naction_runserver = script.make_runserver(make_app, use_reloader=True)\naction_shell = script.make_shell(make_shell)\n\nscript.run()\n"},"new_contents":{"kind":"string","value":"from werkzeug import script\n\n\ndef make_app():\n from cadorsfeed.application import CadorsFeed\n return CadorsFeed()\n\n\ndef make_shell():\n from cadorsfeed import utils\n application = make_app()\n return locals()\n\naction_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')\naction_shell = script.make_shell(make_shell)\n\nscript.run()\n"},"subject":{"kind":"string","value":"Set hostname to '' so the server binds to all interfaces."},"message":{"kind":"string","value":"Set hostname to '' so the server binds to all 
interfaces.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"kurtraschke/cadors-parse,kurtraschke/cadors-parse"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom werkzeug import script\n\n\ndef make_app():\n from cadorsfeed.application import CadorsFeed\n return CadorsFeed()\n\n\ndef make_shell():\n from cadorsfeed import utils\n application = make_app()\n return locals()\n\naction_runserver = script.make_runserver(make_app, use_reloader=True)\naction_shell = script.make_shell(make_shell)\n\nscript.run()\n\n## Instruction:\nSet hostname to '' so the server binds to all interfaces.\n\n## Code After:\nfrom werkzeug import script\n\n\ndef make_app():\n from cadorsfeed.application import CadorsFeed\n return CadorsFeed()\n\n\ndef make_shell():\n from cadorsfeed import utils\n application = make_app()\n return locals()\n\naction_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')\naction_shell = script.make_shell(make_shell)\n\nscript.run()\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n application = make_app()\n return locals()\n\naction_runserver = script.make_runserver(make_app, use_reloader=True, hostname='')\naction_shell = script.make_shell(make_shell)\n\nscript.run()\n\n\n ... 
"}}},{"rowIdx":2192,"cells":{"commit":{"kind":"string","value":"7f51f153f0fd1fd1dde06808879911897686f819"},"old_file":{"kind":"string","value":"cities/Sample_City.py"},"new_file":{"kind":"string","value":"cities/Sample_City.py"},"old_contents":{"kind":"string","value":"from bs4 import BeautifulSoup\nimport json\nimport datetime\nimport pytz\n\n# The URL for the page where the parking lots are listed\ndata_url = \"http://example.com\"\n\n# Name of the city, just in case it contains umlauts which this filename shouldn't\ncity_name = \"Sample City\"\n\n# Name of this file (without '.py'), sorry for needing this, but it makes things easier\nfile_name = \"Sample_City\"\n\n\ndef parse_html(html):\n soup = BeautifulSoup(html)\n\n # Do everything necessary to scrape the contents of the html\n # into a dictionary of the format specified by the schema.\n\n\ndef get_geodata_for_lot(lot_name):\n geofile = open(\"./cities/\" + file_name + \".geojson\")\n geodata = geofile.read()\n geofile.close()\n geodata = json.loads(geodata)\n\n for feature in geodata[\"features\"]:\n if feature[\"properties\"][\"name\"] == lot_name:\n return {\n \"lon\": feature[\"geometry\"][\"coordinates\"][0],\n \"lat\": feature[\"geometry\"][\"coordinates\"][1]\n }\n return []\n\n\nif __name__ == \"__main__\":\n file = open(\"../tests/sample_city.html\")\n html_data = file.read()\n file.close()\n parse_html(html_data)\n"},"new_contents":{"kind":"string","value":"from bs4 import BeautifulSoup\nimport datetime\nimport pytz\nfrom geodata import GeoData\n\n# The URL for the page where the parking lots are listed\ndata_url = \"http://example.com\"\n\n# Name of the city, just in case it contains umlauts which this filename shouldn't\ncity_name = \"Sample City\"\n\n# Name of this file (without '.py'), sorry for needing this, but it makes things easier\nfile_name = \"Sample_City\"\n\n# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory\n# geodata = 
GeoData(city_name)\n\ndef parse_html(html):\n soup = BeautifulSoup(html)\n\n # Do everything necessary to scrape the contents of the html\n # into a dictionary of the format specified by the schema.\n\n data = {\n \"last_updated\": \"\",\n \"lots\": []\n }\n\n print(data)\n return data\n\n# the following is for testing this out, just delete it all when done\nif __name__ == \"__main__\":\n with open(\"../tests/sample_city.html\") as f:\n parse_html(f.read())\n"},"subject":{"kind":"string","value":"Clean up sample city file"},"message":{"kind":"string","value":"Clean up sample city file\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"offenesdresden/ParkAPI,Mic92/ParkAPI,offenesdresden/ParkAPI,Mic92/ParkAPI"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom bs4 import BeautifulSoup\nimport json\nimport datetime\nimport pytz\n\n# The URL for the page where the parking lots are listed\ndata_url = \"http://example.com\"\n\n# Name of the city, just in case it contains umlauts which this filename shouldn't\ncity_name = \"Sample City\"\n\n# Name of this file (without '.py'), sorry for needing this, but it makes things easier\nfile_name = \"Sample_City\"\n\n\ndef parse_html(html):\n soup = BeautifulSoup(html)\n\n # Do everything necessary to scrape the contents of the html\n # into a dictionary of the format specified by the schema.\n\n\ndef get_geodata_for_lot(lot_name):\n geofile = open(\"./cities/\" + file_name + \".geojson\")\n geodata = geofile.read()\n geofile.close()\n geodata = json.loads(geodata)\n\n for feature in geodata[\"features\"]:\n if feature[\"properties\"][\"name\"] == lot_name:\n return {\n \"lon\": feature[\"geometry\"][\"coordinates\"][0],\n \"lat\": feature[\"geometry\"][\"coordinates\"][1]\n }\n return []\n\n\nif __name__ == \"__main__\":\n file = open(\"../tests/sample_city.html\")\n html_data = file.read()\n file.close()\n 
parse_html(html_data)\n\n## Instruction:\nClean up sample city file\n\n## Code After:\nfrom bs4 import BeautifulSoup\nimport datetime\nimport pytz\nfrom geodata import GeoData\n\n# The URL for the page where the parking lots are listed\ndata_url = \"http://example.com\"\n\n# Name of the city, just in case it contains umlauts which this filename shouldn't\ncity_name = \"Sample City\"\n\n# Name of this file (without '.py'), sorry for needing this, but it makes things easier\nfile_name = \"Sample_City\"\n\n# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory\n# geodata = GeoData(city_name)\n\ndef parse_html(html):\n soup = BeautifulSoup(html)\n\n # Do everything necessary to scrape the contents of the html\n # into a dictionary of the format specified by the schema.\n\n data = {\n \"last_updated\": \"\",\n \"lots\": []\n }\n\n print(data)\n return data\n\n# the following is for testing this out, just delete it all when done\nif __name__ == \"__main__\":\n with open(\"../tests/sample_city.html\") as f:\n parse_html(f.read())\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom bs4 import BeautifulSoup\nimport datetime\nimport pytz\nfrom geodata import GeoData\n\n# The URL for the page where the parking lots are listed\ndata_url = \"http://example.com\"\n\n\n ... \n\n\n# Name of this file (without '.py'), sorry for needing this, but it makes things easier\nfile_name = \"Sample_City\"\n\n# Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory\n# geodata = GeoData(city_name)\n\ndef parse_html(html):\n soup = BeautifulSoup(html)\n\n\n ... 
\n\n\n # Do everything necessary to scrape the contents of the html\n # into a dictionary of the format specified by the schema.\n\n data = {\n \"last_updated\": \"\",\n \"lots\": []\n }\n\n print(data)\n return data\n\n# the following is for testing this out, just delete it all when done\nif __name__ == \"__main__\":\n with open(\"../tests/sample_city.html\") as f:\n parse_html(f.read())\n\n\n ... "}}},{"rowIdx":2193,"cells":{"commit":{"kind":"string","value":"791d021497e1e4169383b28e6e1c8ab806645616"},"old_file":{"kind":"string","value":"plugins/stats-collector/src/com/intellij/stats/experiment/EmulatedExperiment.kt"},"new_file":{"kind":"string","value":"plugins/stats-collector/src/com/intellij/stats/experiment/EmulatedExperiment.kt"},"old_contents":{"kind":"string","value":"// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.stats.experiment\n\nimport com.intellij.internal.statistic.DeviceIdManager\nimport com.intellij.openapi.application.ApplicationManager\nimport com.intellij.openapi.util.registry.Registry\n\n/*\n * For now, we decide about AB experiment inside IDE using user id and salt\n */\nclass EmulatedExperiment {\n companion object {\n const val GROUP_A_EXPERIMENT_VERSION: Int = 7\n const val GROUP_B_EXPERIMENT_VERSION: Int = 8\n const val IS_ENABLED = true\n\n fun shouldRank(experimentVersion: Int): Boolean {\n return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`(\"completion.stats.exit.experiment\")\n }\n }\n\n fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? 
{\n val application = ApplicationManager.getApplication()\n if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) {\n return null\n }\n\n val userId = DeviceIdManager.getOrGenerateId()\n val hash = (userId + salt).hashCode() % 16\n return when (hash) {\n 3 -> GROUP_A_EXPERIMENT_VERSION\n 4 -> GROUP_B_EXPERIMENT_VERSION\n else -> null\n }\n }\n}"},"new_contents":{"kind":"string","value":"// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.stats.experiment\n\nimport com.intellij.internal.statistic.DeviceIdManager\nimport com.intellij.openapi.application.ApplicationManager\nimport com.intellij.openapi.util.registry.Registry\nimport kotlin.math.abs\n\n/*\n * For now, we decide about AB experiment inside IDE using user id and salt\n */\nclass EmulatedExperiment {\n companion object {\n const val GROUP_A_EXPERIMENT_VERSION: Int = 7\n const val GROUP_B_EXPERIMENT_VERSION: Int = 8\n const val IS_ENABLED = true\n\n fun shouldRank(experimentVersion: Int): Boolean {\n return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`(\"completion.stats.exit.experiment\")\n }\n }\n\n fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? 
{\n val application = ApplicationManager.getApplication()\n if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) {\n return null\n }\n\n val userId = DeviceIdManager.getOrGenerateId()\n val hash = abs((userId + salt).hashCode()) % 8\n return when (hash) {\n 3 -> GROUP_A_EXPERIMENT_VERSION\n 4 -> GROUP_B_EXPERIMENT_VERSION\n else -> null\n }\n }\n}"},"subject":{"kind":"string","value":"Increase the number of AB experiment participants (x4)"},"message":{"kind":"string","value":"[stats-collector] Increase the number of AB experiment participants (x4)\n\nGitOrigin-RevId: e56710dffe69f4bafa7be3805c3f0d60e90c0529"},"lang":{"kind":"string","value":"Kotlin"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community"},"config":{"kind":"string","value":"kotlin"},"content":{"kind":"string","value":"## Code Before:\n// Copyright 2000-2018 JetBrains s.r.o. 
Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.stats.experiment\n\nimport com.intellij.internal.statistic.DeviceIdManager\nimport com.intellij.openapi.application.ApplicationManager\nimport com.intellij.openapi.util.registry.Registry\n\n/*\n * For now, we decide about AB experiment inside IDE using user id and salt\n */\nclass EmulatedExperiment {\n companion object {\n const val GROUP_A_EXPERIMENT_VERSION: Int = 7\n const val GROUP_B_EXPERIMENT_VERSION: Int = 8\n const val IS_ENABLED = true\n\n fun shouldRank(experimentVersion: Int): Boolean {\n return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`(\"completion.stats.exit.experiment\")\n }\n }\n\n fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? {\n val application = ApplicationManager.getApplication()\n if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) {\n return null\n }\n\n val userId = DeviceIdManager.getOrGenerateId()\n val hash = (userId + salt).hashCode() % 16\n return when (hash) {\n 3 -> GROUP_A_EXPERIMENT_VERSION\n 4 -> GROUP_B_EXPERIMENT_VERSION\n else -> null\n }\n }\n}\n## Instruction:\n[stats-collector] Increase the number of AB experiment participants (x4)\n\nGitOrigin-RevId: e56710dffe69f4bafa7be3805c3f0d60e90c0529\n## Code After:\n// Copyright 2000-2018 JetBrains s.r.o. 
Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.\npackage com.intellij.stats.experiment\n\nimport com.intellij.internal.statistic.DeviceIdManager\nimport com.intellij.openapi.application.ApplicationManager\nimport com.intellij.openapi.util.registry.Registry\nimport kotlin.math.abs\n\n/*\n * For now, we decide about AB experiment inside IDE using user id and salt\n */\nclass EmulatedExperiment {\n companion object {\n const val GROUP_A_EXPERIMENT_VERSION: Int = 7\n const val GROUP_B_EXPERIMENT_VERSION: Int = 8\n const val IS_ENABLED = true\n\n fun shouldRank(experimentVersion: Int): Boolean {\n return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`(\"completion.stats.exit.experiment\")\n }\n }\n\n fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? {\n val application = ApplicationManager.getApplication()\n if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) {\n return null\n }\n\n val userId = DeviceIdManager.getOrGenerateId()\n val hash = abs((userId + salt).hashCode()) % 8\n return when (hash) {\n 3 -> GROUP_A_EXPERIMENT_VERSION\n 4 -> GROUP_B_EXPERIMENT_VERSION\n else -> null\n }\n }\n}"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport com.intellij.internal.statistic.DeviceIdManager\nimport com.intellij.openapi.application.ApplicationManager\nimport com.intellij.openapi.util.registry.Registry\nimport kotlin.math.abs\n\n/*\n * For now, we decide about AB experiment inside IDE using user id and salt\n\n\n ... \n\n\n }\n\n val userId = DeviceIdManager.getOrGenerateId()\n val hash = abs((userId + salt).hashCode()) % 8\n return when (hash) {\n 3 -> GROUP_A_EXPERIMENT_VERSION\n 4 -> GROUP_B_EXPERIMENT_VERSION\n\n\n ... 
"}}},{"rowIdx":2194,"cells":{"commit":{"kind":"string","value":"e2be9eb27d6fc7cfa424cbf908347796ab595526"},"old_file":{"kind":"string","value":"groundstation/broadcast_announcer.py"},"new_file":{"kind":"string","value":"groundstation/broadcast_announcer.py"},"old_contents":{"kind":"string","value":"import socket\nimport logger\nfrom groundstation.broadcast_socket import BroadcastSocket\n\nimport logger\nlog = logger.getLogger(__name__)\n\nclass BroadcastAnnouncer(BroadcastSocket):\n def __init__(self, port):\n super(BroadcastAnnouncer, self).__init__()\n self._addr = '255.255.255.255', port\n self._name = None\n self.broadcast_payload = \"PING None\"\n\n @property\n def name(self):\n return self._name\n\n @name.setter\n def name(self, value):\n self._name = value\n self.broadcast_payload = \"PING %s\" % (self._name)\n\n def ping(self):\n log.info(\"ping payload: %s\" % (self.broadcast_payload))\n transmitted = self.socket.sendto(self.broadcast_payload, self._addr)\n if transmitted != len(self.broadcast_payload):\n log.warning(\"ping wasn't successfully broadcast\")\n"},"new_contents":{"kind":"string","value":"import socket\nimport logger\nfrom sockets.broadcast_socket import BroadcastSocket\n\nimport logger\nlog = logger.getLogger(__name__)\n\nclass BroadcastAnnouncer(BroadcastSocket):\n def __init__(self, port):\n super(BroadcastAnnouncer, self).__init__()\n self._addr = '255.255.255.255', port\n self._name = None\n self.broadcast_payload = \"PING None\"\n\n @property\n def name(self):\n return self._name\n\n @name.setter\n def name(self, value):\n self._name = value\n self.broadcast_payload = \"PING %s\" % (self._name)\n\n def ping(self):\n log.info(\"ping payload: %s\" % (self.broadcast_payload))\n transmitted = self.socket.sendto(self.broadcast_payload, self._addr)\n if transmitted != len(self.broadcast_payload):\n log.warning(\"ping wasn't successfully broadcast\")\n"},"subject":{"kind":"string","value":"Fix an import path bug masked by remaining .pyc 
files"},"message":{"kind":"string","value":"Fix an import path bug masked by remaining .pyc files\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport socket\nimport logger\nfrom groundstation.broadcast_socket import BroadcastSocket\n\nimport logger\nlog = logger.getLogger(__name__)\n\nclass BroadcastAnnouncer(BroadcastSocket):\n def __init__(self, port):\n super(BroadcastAnnouncer, self).__init__()\n self._addr = '255.255.255.255', port\n self._name = None\n self.broadcast_payload = \"PING None\"\n\n @property\n def name(self):\n return self._name\n\n @name.setter\n def name(self, value):\n self._name = value\n self.broadcast_payload = \"PING %s\" % (self._name)\n\n def ping(self):\n log.info(\"ping payload: %s\" % (self.broadcast_payload))\n transmitted = self.socket.sendto(self.broadcast_payload, self._addr)\n if transmitted != len(self.broadcast_payload):\n log.warning(\"ping wasn't successfully broadcast\")\n\n## Instruction:\nFix an import path bug masked by remaining .pyc files\n\n## Code After:\nimport socket\nimport logger\nfrom sockets.broadcast_socket import BroadcastSocket\n\nimport logger\nlog = logger.getLogger(__name__)\n\nclass BroadcastAnnouncer(BroadcastSocket):\n def __init__(self, port):\n super(BroadcastAnnouncer, self).__init__()\n self._addr = '255.255.255.255', port\n self._name = None\n self.broadcast_payload = \"PING None\"\n\n @property\n def name(self):\n return self._name\n\n @name.setter\n def name(self, value):\n self._name = value\n self.broadcast_payload = \"PING %s\" % (self._name)\n\n def ping(self):\n log.info(\"ping payload: %s\" % (self.broadcast_payload))\n transmitted = self.socket.sendto(self.broadcast_payload, self._addr)\n if transmitted != 
len(self.broadcast_payload):\n log.warning(\"ping wasn't successfully broadcast\")\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport socket\nimport logger\nfrom sockets.broadcast_socket import BroadcastSocket\n\nimport logger\nlog = logger.getLogger(__name__)\n\n\n// ... rest of the code ..."}}},{"rowIdx":2195,"cells":{"commit":{"kind":"string","value":"67c1855f75a3c29bc650c193235576f6b591c805"},"old_file":{"kind":"string","value":"payment_redsys/__manifest__.py"},"new_file":{"kind":"string","value":"payment_redsys/__manifest__.py"},"old_contents":{"kind":"string","value":"\n{\n \"name\": \"Pasarela de pago Redsys\",\n \"category\": \"Payment Acquirer\",\n \"summary\": \"Payment Acquirer: Redsys Implementation\",\n \"version\": \"14.0.2.0.0\",\n \"author\": \"Tecnativa,\" \"Odoo Community Association (OCA)\",\n \"website\": \"https://github.com/OCA/l10n-spain\",\n \"depends\": [\"payment\", \"website_sale\"],\n \"external_dependencies\": {\"python\": [\"Crypto.Cipher.DES3\"]},\n \"data\": [\n \"views/redsys.xml\",\n \"views/payment_acquirer.xml\",\n \"views/payment_redsys_templates.xml\",\n \"data/payment_redsys.xml\",\n ],\n \"license\": \"AGPL-3\",\n \"installable\": True,\n}\n"},"new_contents":{"kind":"string","value":"\n{\n \"name\": \"Pasarela de pago Redsys\",\n \"category\": \"Payment Acquirer\",\n \"summary\": \"Payment Acquirer: Redsys Implementation\",\n \"version\": \"14.0.2.0.0\",\n \"author\": \"Tecnativa,\" \"Odoo Community Association (OCA)\",\n \"website\": \"https://github.com/OCA/l10n-spain\",\n \"depends\": [\"payment\", \"website_sale\"],\n \"external_dependencies\": {\"python\": [\"pycrypto\"]},\n \"data\": [\n \"views/redsys.xml\",\n \"views/payment_acquirer.xml\",\n \"views/payment_redsys_templates.xml\",\n \"data/payment_redsys.xml\",\n ],\n \"license\": \"AGPL-3\",\n \"installable\": True,\n}\n"},"subject":{"kind":"string","value":"Put real package on pypi"},"message":{"kind":"string","value":"[IMP] 
payment_redsys: Put real package on pypi\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"cubells/l10n-spain,cubells/l10n-spain,cubells/l10n-spain"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\n{\n \"name\": \"Pasarela de pago Redsys\",\n \"category\": \"Payment Acquirer\",\n \"summary\": \"Payment Acquirer: Redsys Implementation\",\n \"version\": \"14.0.2.0.0\",\n \"author\": \"Tecnativa,\" \"Odoo Community Association (OCA)\",\n \"website\": \"https://github.com/OCA/l10n-spain\",\n \"depends\": [\"payment\", \"website_sale\"],\n \"external_dependencies\": {\"python\": [\"Crypto.Cipher.DES3\"]},\n \"data\": [\n \"views/redsys.xml\",\n \"views/payment_acquirer.xml\",\n \"views/payment_redsys_templates.xml\",\n \"data/payment_redsys.xml\",\n ],\n \"license\": \"AGPL-3\",\n \"installable\": True,\n}\n\n## Instruction:\n[IMP] payment_redsys: Put real package on pypi\n\n## Code After:\n\n{\n \"name\": \"Pasarela de pago Redsys\",\n \"category\": \"Payment Acquirer\",\n \"summary\": \"Payment Acquirer: Redsys Implementation\",\n \"version\": \"14.0.2.0.0\",\n \"author\": \"Tecnativa,\" \"Odoo Community Association (OCA)\",\n \"website\": \"https://github.com/OCA/l10n-spain\",\n \"depends\": [\"payment\", \"website_sale\"],\n \"external_dependencies\": {\"python\": [\"pycrypto\"]},\n \"data\": [\n \"views/redsys.xml\",\n \"views/payment_acquirer.xml\",\n \"views/payment_redsys_templates.xml\",\n \"data/payment_redsys.xml\",\n ],\n \"license\": \"AGPL-3\",\n \"installable\": True,\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n \"author\": \"Tecnativa,\" \"Odoo Community Association (OCA)\",\n \"website\": \"https://github.com/OCA/l10n-spain\",\n \"depends\": [\"payment\", \"website_sale\"],\n \"external_dependencies\": {\"python\": [\"pycrypto\"]},\n \"data\": [\n \"views/redsys.xml\",\n \"views/payment_acquirer.xml\",\n\n\n ... 
"}}},{"rowIdx":2196,"cells":{"commit":{"kind":"string","value":"0d1f0fa79d2cf381ed15b98ada4f9bec28f6c749"},"old_file":{"kind":"string","value":"providence-tools-common/src/main/java/net/morimekta/providence/tools/common/Utils.java"},"new_file":{"kind":"string","value":"providence-tools-common/src/main/java/net/morimekta/providence/tools/common/Utils.java"},"old_contents":{"kind":"string","value":"package net.morimekta.providence.tools.common;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Properties;\n\n/**\n * General utility methods.\n */\npublic class Utils {\n public static String getVersionString() throws IOException {\n Properties properties = new Properties();\n try (InputStream in = Utils.class.getResourceAsStream(\"/version.properties\")) {\n properties.load(in);\n }\n return \"v\" + properties.getProperty(\"build.version\");\n }\n}\n"},"new_contents":{"kind":"string","value":"package net.morimekta.providence.tools.common;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.UncheckedIOException;\nimport java.util.Properties;\n\n/**\n * General utility methods.\n */\npublic class Utils {\n private transient volatile static String versionString = null;\n\n public static String getVersionString() {\n if (versionString == null) {\n try {\n Properties properties = new Properties();\n try (InputStream in = Utils.class.getResourceAsStream(\"/version.properties\")) {\n properties.load(in);\n }\n versionString = \"v\" + properties.getProperty(\"build.version\");\n } catch (IOException e) {\n throw new UncheckedIOException(e.getMessage(), e);\n }\n }\n return versionString;\n }\n}\n"},"subject":{"kind":"string","value":"Read version string only once."},"message":{"kind":"string","value":"Read version string only 
once.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"morimekta/thrift-j2,morimekta/providence,morimekta/providence"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage net.morimekta.providence.tools.common;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.util.Properties;\n\n/**\n * General utility methods.\n */\npublic class Utils {\n public static String getVersionString() throws IOException {\n Properties properties = new Properties();\n try (InputStream in = Utils.class.getResourceAsStream(\"/version.properties\")) {\n properties.load(in);\n }\n return \"v\" + properties.getProperty(\"build.version\");\n }\n}\n\n## Instruction:\nRead version string only once.\n\n## Code After:\npackage net.morimekta.providence.tools.common;\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.UncheckedIOException;\nimport java.util.Properties;\n\n/**\n * General utility methods.\n */\npublic class Utils {\n private transient volatile static String versionString = null;\n\n public static String getVersionString() {\n if (versionString == null) {\n try {\n Properties properties = new Properties();\n try (InputStream in = Utils.class.getResourceAsStream(\"/version.properties\")) {\n properties.load(in);\n }\n versionString = \"v\" + properties.getProperty(\"build.version\");\n } catch (IOException e) {\n throw new UncheckedIOException(e.getMessage(), e);\n }\n }\n return versionString;\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\nimport java.io.IOException;\nimport java.io.InputStream;\nimport java.io.UncheckedIOException;\nimport java.util.Properties;\n\n/**\n\n\n ... 
\n\n\n * General utility methods.\n */\npublic class Utils {\n private transient volatile static String versionString = null;\n\n public static String getVersionString() {\n if (versionString == null) {\n try {\n Properties properties = new Properties();\n try (InputStream in = Utils.class.getResourceAsStream(\"/version.properties\")) {\n properties.load(in);\n }\n versionString = \"v\" + properties.getProperty(\"build.version\");\n } catch (IOException e) {\n throw new UncheckedIOException(e.getMessage(), e);\n }\n }\n return versionString;\n }\n}\n\n\n ... "}}},{"rowIdx":2197,"cells":{"commit":{"kind":"string","value":"6da466984143d2a9176870583ca5dba8d1b9764c"},"old_file":{"kind":"string","value":"test/integration/test_graylogapi.py"},"new_file":{"kind":"string","value":"test/integration/test_graylogapi.py"},"old_contents":{"kind":"string","value":"import pytest\nfrom pygraylog.pygraylog import graylogapi\n\ndef test_get():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n res = api._get()\n expected = {\n 'one': 'two'\n }\n assert res == expected\n\ndef test_post():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._post()\n\ndef test_put():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._put()\n\ndef test_delete():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._delete()\n\n"},"new_contents":{"kind":"string","value":"import pytest\nfrom pygraylog.pygraylog import graylogapi\n\ndef test_get():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n res = api._get()\n expected = \"{\\\"one\\\": \\\"two\\\"}\\n\"\n assert res 
== expected\n\ndef test_post():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._post()\n\ndef test_put():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._put()\n\ndef test_delete():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._delete()\n\n"},"subject":{"kind":"string","value":"Modify test to reflect that api returns string response."},"message":{"kind":"string","value":"Modify test to reflect that api returns string response.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"zmallen/pygraylog"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport pytest\nfrom pygraylog.pygraylog import graylogapi\n\ndef test_get():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n res = api._get()\n expected = {\n 'one': 'two'\n }\n assert res == expected\n\ndef test_post():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._post()\n\ndef test_put():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._put()\n\ndef test_delete():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._delete()\n\n\n## Instruction:\nModify test to reflect that api returns string response.\n\n## Code After:\nimport pytest\nfrom pygraylog.pygraylog import graylogapi\n\ndef 
test_get():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n res = api._get()\n expected = \"{\\\"one\\\": \\\"two\\\"}\\n\"\n assert res == expected\n\ndef test_post():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._post()\n\ndef test_put():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._put()\n\ndef test_delete():\n api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', \n username = 'Zack',\n password = 'Zack')\n with pytest.raises(NotImplementedError):\n api._delete()\n\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n username = 'Zack',\n password = 'Zack')\n res = api._get()\n expected = \"{\\\"one\\\": \\\"two\\\"}\\n\"\n assert res == expected\n\ndef test_post():\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":2198,"cells":{"commit":{"kind":"string","value":"19dd810c5acb35ce5d7565ee57a55ae725194bd1"},"old_file":{"kind":"string","value":"mvp/integration.py"},"new_file":{"kind":"string","value":"mvp/integration.py"},"old_contents":{"kind":"string","value":"\n\nclass Integration(object):\n\n name = None\n description = None\n icon = None\n banner = None\n requires_confirmation = False\n enabled_by_default = False\n columns = 1\n\n def __init__(self):\n self.set_enabled(self.enabled_by_default)\n\n def fields(self):\n '''Return a list of fields.\n\n Example:\n return [\n {\n 'name': 'StringField',\n 'type': 'str',\n 'default': None,\n 'options': [...],\n 'required': False,\n },\n ...\n ]\n '''\n return NotImplemented\n\n def on_filename_changed(self, form, value):\n return NotImplemented\n\n def set_enabled(self, value):\n '''Returns True if the integration was successfully enabled'''\n\n if value:\n return self._on_enable()\n else:\n return self._on_disable()\n\n def _on_enable(self):\n self.enabled = self.on_enable()\n return self.enabled\n\n def on_enable(self):\n '''Return True to enable integration and False to disable'''\n\n return True\n\n def _on_disable(self):\n self.enabled = not self.on_disable()\n return self.enabled\n\n def on_disable(self):\n '''Return True to disable integration and False to enable'''\n\n return True\n\n def before_playblast(self, data):\n return NotImplemented\n\n def after_playblast(self, data):\n return NotImplemented\n"},"new_contents":{"kind":"string","value":"\n\nclass Integration(object):\n\n name = None\n description = None\n icon = None\n banner = None\n requires_confirmation = False\n enabled_by_default = False\n columns = 1\n\n def __init__(self):\n self.set_enabled(self.enabled_by_default)\n\n def fields(self):\n '''Return a list of fields.\n\n Example:\n return [\n {\n 'name': 'StringField',\n 'type': 'str',\n 'default': None,\n 'options': [...],\n 'required': False,\n },\n ...\n ]\n '''\n return 
NotImplemented\n\n def on_filename_changed(self, form, value):\n return NotImplemented\n\n def set_enabled(self, value):\n '''Returns True if the integration was successfully enabled'''\n\n if value:\n return self._on_enable()\n else:\n return self._on_disable()\n\n def _on_enable(self):\n self.enabled = self.on_enable()\n return self.enabled\n\n def on_enable(self):\n '''Return True to enable integration and False to disable'''\n\n return True\n\n def _on_disable(self):\n self.enabled = not self.on_disable()\n return self.enabled\n\n def on_disable(self):\n '''Return True to disable integration and False to enable'''\n\n return True\n\n def before_playblast(self, form, data):\n '''Runs before playblasting.'''\n\n return NotImplemented\n\n def after_playblast(self, form, data):\n '''Runs after playblasting.'''\n\n return NotImplemented\n\n def finalize(self, form, data):\n '''Runs after entire playblast process is finished.\n\n Unlike after_playblast, this method will only run ONCE after all\n playblasting is finished. 
So, when playblasting multiple render layers\n you can use this to execute after all of those render layers have\n completed rendering.\n\n Arguments:\n form: The Form object including render options\n data: List of renders that were output\n '''\n\n return NotImplemented\n"},"subject":{"kind":"string","value":"Add finalize method to Integration."},"message":{"kind":"string","value":"Add finalize method to Integration.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"danbradham/mvp"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\n\nclass Integration(object):\n\n name = None\n description = None\n icon = None\n banner = None\n requires_confirmation = False\n enabled_by_default = False\n columns = 1\n\n def __init__(self):\n self.set_enabled(self.enabled_by_default)\n\n def fields(self):\n '''Return a list of fields.\n\n Example:\n return [\n {\n 'name': 'StringField',\n 'type': 'str',\n 'default': None,\n 'options': [...],\n 'required': False,\n },\n ...\n ]\n '''\n return NotImplemented\n\n def on_filename_changed(self, form, value):\n return NotImplemented\n\n def set_enabled(self, value):\n '''Returns True if the integration was successfully enabled'''\n\n if value:\n return self._on_enable()\n else:\n return self._on_disable()\n\n def _on_enable(self):\n self.enabled = self.on_enable()\n return self.enabled\n\n def on_enable(self):\n '''Return True to enable integration and False to disable'''\n\n return True\n\n def _on_disable(self):\n self.enabled = not self.on_disable()\n return self.enabled\n\n def on_disable(self):\n '''Return True to disable integration and False to enable'''\n\n return True\n\n def before_playblast(self, data):\n return NotImplemented\n\n def after_playblast(self, data):\n return NotImplemented\n\n## Instruction:\nAdd finalize method to Integration.\n\n## Code After:\n\n\nclass Integration(object):\n\n name = 
None\n description = None\n icon = None\n banner = None\n requires_confirmation = False\n enabled_by_default = False\n columns = 1\n\n def __init__(self):\n self.set_enabled(self.enabled_by_default)\n\n def fields(self):\n '''Return a list of fields.\n\n Example:\n return [\n {\n 'name': 'StringField',\n 'type': 'str',\n 'default': None,\n 'options': [...],\n 'required': False,\n },\n ...\n ]\n '''\n return NotImplemented\n\n def on_filename_changed(self, form, value):\n return NotImplemented\n\n def set_enabled(self, value):\n '''Returns True if the integration was successfully enabled'''\n\n if value:\n return self._on_enable()\n else:\n return self._on_disable()\n\n def _on_enable(self):\n self.enabled = self.on_enable()\n return self.enabled\n\n def on_enable(self):\n '''Return True to enable integration and False to disable'''\n\n return True\n\n def _on_disable(self):\n self.enabled = not self.on_disable()\n return self.enabled\n\n def on_disable(self):\n '''Return True to disable integration and False to enable'''\n\n return True\n\n def before_playblast(self, form, data):\n '''Runs before playblasting.'''\n\n return NotImplemented\n\n def after_playblast(self, form, data):\n '''Runs after playblasting.'''\n\n return NotImplemented\n\n def finalize(self, form, data):\n '''Runs after entire playblast process is finished.\n\n Unlike after_playblast, this method will only run ONCE after all\n playblasting is finished. So, when playblasting multiple render layers\n you can use this to execute after all of those render layers have\n completed rendering.\n\n Arguments:\n form: The Form object including render options\n data: List of renders that were output\n '''\n\n return NotImplemented\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\n\n return True\n\n def before_playblast(self, form, data):\n '''Runs before playblasting.'''\n\n return NotImplemented\n\n def after_playblast(self, form, data):\n '''Runs after playblasting.'''\n\n return NotImplemented\n\n def finalize(self, form, data):\n '''Runs after entire playblast process is finished.\n\n Unlike after_playblast, this method will only run ONCE after all\n playblasting is finished. So, when playblasting multiple render layers\n you can use this to execute after all of those render layers have\n completed rendering.\n\n Arguments:\n form: The Form object including render options\n data: List of renders that were output\n '''\n\n return NotImplemented\n\n\n// ... rest of the code ..."}}},{"rowIdx":2199,"cells":{"commit":{"kind":"string","value":"aa55b1577582b61c011a2af2e4e5f3bf78421e38"},"old_file":{"kind":"string","value":"src/main/java/com/techcavern/wavetact/Main.java"},"new_file":{"kind":"string","value":"src/main/java/com/techcavern/wavetact/Main.java"},"old_contents":{"kind":"string","value":"package com.techcavern.wavetact;\n\nimport com.techcavern.wavetact.utils.CommandLineUtils;\nimport com.techcavern.wavetact.utils.GeneralRegistry;\nimport com.techcavern.wavetact.utils.IRCUtils;\n\nimport org.slf4j.impl.SimpleLogger;\n\n@SuppressWarnings(\"ConstantConditions\")\npublic class Main {\n\n\n public static void main(String[] args) throws Exception {\n CommandLineUtils.initializeCommandlines();\n // CommandLineUtils.parseCommandLineArguments(args);\n System.out.println(\"Starting...\");\n System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, \"true\");\n System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, \"[yyyy/MM/dd HH:mm:ss]\");\n System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, \"true\");\n IRCUtils.registerCommands();\n// IRCUtils.registerNetworks();\n IRCUtils.registerDevServer();\n IRCUtils.loadSimpleActions();\n IRCUtils.loadSimpleMessages();\n IRCUtils.startThreads();\n GeneralRegistry.WaveTact.start();\n 
}\n}\n"},"new_contents":{"kind":"string","value":"package com.techcavern.wavetact;\n\nimport com.techcavern.wavetact.utils.CommandLineUtils;\nimport com.techcavern.wavetact.utils.GeneralRegistry;\nimport com.techcavern.wavetact.utils.IRCUtils;\n\nimport org.slf4j.impl.SimpleLogger;\n\n@SuppressWarnings(\"ConstantConditions\")\npublic class Main {\n\n\n public static void main(String[] args) throws Exception {\n if (!Boolean.parseBoolean(System.getProperty(\"dev\"))) {\n System.out.println(\"Running in production mode\");\n CommandLineUtils.initializeCommandlines();\n CommandLineUtils.parseCommandLineArguments(args);\n } else {\n System.out.println(\"Running in developer mode\");\n IRCUtils.registerDevServer();\n }\n\n System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, \"true\");\n System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, \"[yyyy/MM/dd HH:mm:ss]\");\n System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, \"true\");\n IRCUtils.registerCommands();\n IRCUtils.registerDevServer();\n IRCUtils.loadSimpleActions();\n IRCUtils.loadSimpleMessages();\n IRCUtils.startThreads();\n GeneralRegistry.WaveTact.start();\n }\n}\n"},"subject":{"kind":"string","value":"Add system parameter for dev mode"},"message":{"kind":"string","value":"Add system parameter for dev mode\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"TechCavern/WaveTact"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.techcavern.wavetact;\n\nimport com.techcavern.wavetact.utils.CommandLineUtils;\nimport com.techcavern.wavetact.utils.GeneralRegistry;\nimport com.techcavern.wavetact.utils.IRCUtils;\n\nimport org.slf4j.impl.SimpleLogger;\n\n@SuppressWarnings(\"ConstantConditions\")\npublic class Main {\n\n\n public static void main(String[] args) throws Exception {\n CommandLineUtils.initializeCommandlines();\n // CommandLineUtils.parseCommandLineArguments(args);\n 
System.out.println(\"Starting...\");\n System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, \"true\");\n System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, \"[yyyy/MM/dd HH:mm:ss]\");\n System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, \"true\");\n IRCUtils.registerCommands();\n// IRCUtils.registerNetworks();\n IRCUtils.registerDevServer();\n IRCUtils.loadSimpleActions();\n IRCUtils.loadSimpleMessages();\n IRCUtils.startThreads();\n GeneralRegistry.WaveTact.start();\n }\n}\n\n## Instruction:\nAdd system parameter for dev mode\n\n## Code After:\npackage com.techcavern.wavetact;\n\nimport com.techcavern.wavetact.utils.CommandLineUtils;\nimport com.techcavern.wavetact.utils.GeneralRegistry;\nimport com.techcavern.wavetact.utils.IRCUtils;\n\nimport org.slf4j.impl.SimpleLogger;\n\n@SuppressWarnings(\"ConstantConditions\")\npublic class Main {\n\n\n public static void main(String[] args) throws Exception {\n if (!Boolean.parseBoolean(System.getProperty(\"dev\"))) {\n System.out.println(\"Running in production mode\");\n CommandLineUtils.initializeCommandlines();\n CommandLineUtils.parseCommandLineArguments(args);\n } else {\n System.out.println(\"Running in developer mode\");\n IRCUtils.registerDevServer();\n }\n\n System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, \"true\");\n System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, \"[yyyy/MM/dd HH:mm:ss]\");\n System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, \"true\");\n IRCUtils.registerCommands();\n IRCUtils.registerDevServer();\n IRCUtils.loadSimpleActions();\n IRCUtils.loadSimpleMessages();\n IRCUtils.startThreads();\n GeneralRegistry.WaveTact.start();\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... 
\n\n\n\n\n public static void main(String[] args) throws Exception {\n if (!Boolean.parseBoolean(System.getProperty(\"dev\"))) {\n System.out.println(\"Running in production mode\");\n CommandLineUtils.initializeCommandlines();\n CommandLineUtils.parseCommandLineArguments(args);\n } else {\n System.out.println(\"Running in developer mode\");\n IRCUtils.registerDevServer();\n }\n\n System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, \"true\");\n System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, \"[yyyy/MM/dd HH:mm:ss]\");\n System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, \"true\");\n IRCUtils.registerCommands();\n IRCUtils.registerDevServer();\n IRCUtils.loadSimpleActions();\n IRCUtils.loadSimpleMessages();\n\n\n ... "}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":21,"numItemsPerPage":100,"numTotalItems":36908,"offset":2100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NzcyNTgzOSwic3ViIjoiL2RhdGFzZXRzL2tzZW5pYXN5Y2gvRWRpdFBhY2tGVC1NdWx0aS1hcHBseS1mdXp6eS1kaWZmcy1oZXVyaXN0aWNzX2NvbnRleHQtMyIsImV4cCI6MTc1NzcyOTQzOSwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.RvevB1gvO1sqzTGbQu2wiHzmvJa77c7MnKsQfN5Twa3Oj51mRJq_-6aeS9wAI0Ll66jpmejAaCt-sxW7Mt9IAQ","displayUrls":true},"discussionsStats":{"closed":0,"open":1,"total":1},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
commit
stringlengths
40
40
old_file
stringlengths
4
234
new_file
stringlengths
4
234
old_contents
stringlengths
10
3.01k
new_contents
stringlengths
19
3.38k
subject
stringlengths
16
736
message
stringlengths
17
2.63k
lang
stringclasses
4 values
license
stringclasses
13 values
repos
stringlengths
5
82.6k
config
stringclasses
4 values
content
stringlengths
134
4.41k
fuzzy_diff
stringlengths
29
3.44k
358f244b397f11cdf9f89304356ac45b4c6621b5
__init__.py
__init__.py
import os.path import subprocess class SubTask(): def __init__(self, output_dir, log): self.__output_dir = output_dir self.__log = log self.__wd = os.path.dirname(os.path.realpath(__file__)) self.__init_done = False print "__init__" def is_initialized(self): print "init", self.__init_done return self.__init_done def initialize(self): print "initialize" self.__init_done = True script = os.path.join(self.__wd, 'get_tcc.sh') retcode = subprocess.call([script, self.__wd]) self.__init_done = retcode == 0 def is_enabled(self): return True def __result(self, output_dir, retcode): return { 'gcc': os.path.join(output_dir, 'bin'), 'passed': retcode == 0 } def run(self, q, args): print "run" script = os.path.join(self.__wd, 'conf_and_make.sh') retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log]) q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)})
import os.path import subprocess class SubTask(): def __init__(self, output_dir, log): self.__output_dir = output_dir self.__log = log self.__wd = os.path.dirname(os.path.realpath(__file__)) self.__init_done = False print "__init__" def is_initialized(self): print "init", self.__init_done return self.__init_done def initialize(self): print "initialize" self.__init_done = True script = os.path.join(self.__wd, 'get_tcc.sh') retcode = subprocess.call([script, self.__wd]) self.__init_done = retcode == 0 def is_enabled(self): return True def run(self, q, args): print "run" script = os.path.join(self.__wd, 'conf_and_make.sh') retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log]) q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
Add return value: tcc path.
Add return value: tcc path.
Python
apache-2.0
lugovskoy/dts-sample-compile
python
## Code Before: import os.path import subprocess class SubTask(): def __init__(self, output_dir, log): self.__output_dir = output_dir self.__log = log self.__wd = os.path.dirname(os.path.realpath(__file__)) self.__init_done = False print "__init__" def is_initialized(self): print "init", self.__init_done return self.__init_done def initialize(self): print "initialize" self.__init_done = True script = os.path.join(self.__wd, 'get_tcc.sh') retcode = subprocess.call([script, self.__wd]) self.__init_done = retcode == 0 def is_enabled(self): return True def __result(self, output_dir, retcode): return { 'gcc': os.path.join(output_dir, 'bin'), 'passed': retcode == 0 } def run(self, q, args): print "run" script = os.path.join(self.__wd, 'conf_and_make.sh') retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log]) q.put({'retcode': retcode, 'result': self.__result(self.__output_dir, retcode)}) ## Instruction: Add return value: tcc path. ## Code After: import os.path import subprocess class SubTask(): def __init__(self, output_dir, log): self.__output_dir = output_dir self.__log = log self.__wd = os.path.dirname(os.path.realpath(__file__)) self.__init_done = False print "__init__" def is_initialized(self): print "init", self.__init_done return self.__init_done def initialize(self): print "initialize" self.__init_done = True script = os.path.join(self.__wd, 'get_tcc.sh') retcode = subprocess.call([script, self.__wd]) self.__init_done = retcode == 0 def is_enabled(self): return True def run(self, q, args): print "run" script = os.path.join(self.__wd, 'conf_and_make.sh') retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log]) q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')})
# ... existing code ... return True def run(self, q, args): print "run" script = os.path.join(self.__wd, 'conf_and_make.sh') retcode = subprocess.call([script, self.__wd, self.__output_dir, self.__log]) q.put({'tcc': os.path.join(self.__output_dir, 'tcc', 'bin')}) # ... rest of the code ...
d4d73fe7d5e83c65d9abbf59ea14ed60eb23a83f
poem_reader.py
poem_reader.py
import argparse from lxml import etree argparser = argparse.ArgumentParser(description="Newspaper XML parser", fromfile_prefix_chars='@') argparser.add_argument("dataroot", help="Path to DHH 17 newspapers directory") args = argparser.parse_args() data_root = args.dataroot with open(data_root + 'newspapers/fin/1854/1457-4616/1457-4616_1854-08-01_31/alto/1457-4616_1854-08-01_31_001.xml', 'r') as f: tree = etree.parse(f) root = tree.getroot() print(root.tag)
import argparse import glob from lxml import etree argparser = argparse.ArgumentParser(description="Newspaper XML parser", fromfile_prefix_chars='@') argparser.add_argument("dataroot", help="Path to DHH 17 newspapers directory") args = argparser.parse_args() data_root = args.dataroot def read_xml_directory(path): """ Read XML files from path, parse them, and return them as list """ files = glob.glob(path + "*.xml") xmls = [] for xmlfile in files: with open(xmlfile, 'r') as f: xmls.append(etree.parse(f)) return xmls def find_by_block_id(xmls, block_id): """ Find an element by block_id from a list of lxml trees """ block_xpath = etree.XPath("//*[@ID='{id}']".format(id=block_id)) for xml in xmls: elements = block_xpath(xml) if elements: return elements[0] some_dir = data_root + 'newspapers/fin/1854/1457-4616/1457-4616_1854-08-01_31/alto/' xmls = read_xml_directory(some_dir) print(etree.tostring(find_by_block_id(xmls, 'P2_TB00001')))
Read XML files from a directory and find textblock by id
Read XML files from a directory and find textblock by id
Python
mit
dhh17/categories_norms_genres,dhh17/categories_norms_genres,dhh17/categories_norms_genres
python
## Code Before: import argparse from lxml import etree argparser = argparse.ArgumentParser(description="Newspaper XML parser", fromfile_prefix_chars='@') argparser.add_argument("dataroot", help="Path to DHH 17 newspapers directory") args = argparser.parse_args() data_root = args.dataroot with open(data_root + 'newspapers/fin/1854/1457-4616/1457-4616_1854-08-01_31/alto/1457-4616_1854-08-01_31_001.xml', 'r') as f: tree = etree.parse(f) root = tree.getroot() print(root.tag) ## Instruction: Read XML files from a directory and find textblock by id ## Code After: import argparse import glob from lxml import etree argparser = argparse.ArgumentParser(description="Newspaper XML parser", fromfile_prefix_chars='@') argparser.add_argument("dataroot", help="Path to DHH 17 newspapers directory") args = argparser.parse_args() data_root = args.dataroot def read_xml_directory(path): """ Read XML files from path, parse them, and return them as list """ files = glob.glob(path + "*.xml") xmls = [] for xmlfile in files: with open(xmlfile, 'r') as f: xmls.append(etree.parse(f)) return xmls def find_by_block_id(xmls, block_id): """ Find an element by block_id from a list of lxml trees """ block_xpath = etree.XPath("//*[@ID='{id}']".format(id=block_id)) for xml in xmls: elements = block_xpath(xml) if elements: return elements[0] some_dir = data_root + 'newspapers/fin/1854/1457-4616/1457-4616_1854-08-01_31/alto/' xmls = read_xml_directory(some_dir) print(etree.tostring(find_by_block_id(xmls, 'P2_TB00001')))
// ... existing code ... import argparse import glob from lxml import etree argparser = argparse.ArgumentParser(description="Newspaper XML parser", fromfile_prefix_chars='@') argparser.add_argument("dataroot", help="Path to DHH 17 newspapers directory") args = argparser.parse_args() data_root = args.dataroot def read_xml_directory(path): """ Read XML files from path, parse them, and return them as list """ files = glob.glob(path + "*.xml") xmls = [] for xmlfile in files: with open(xmlfile, 'r') as f: xmls.append(etree.parse(f)) return xmls def find_by_block_id(xmls, block_id): """ Find an element by block_id from a list of lxml trees """ block_xpath = etree.XPath("//*[@ID='{id}']".format(id=block_id)) for xml in xmls: elements = block_xpath(xml) if elements: return elements[0] some_dir = data_root + 'newspapers/fin/1854/1457-4616/1457-4616_1854-08-01_31/alto/' xmls = read_xml_directory(some_dir) print(etree.tostring(find_by_block_id(xmls, 'P2_TB00001'))) // ... rest of the code ...
114e2e877898f351bbb388cac7df5811b322c48f
setup.py
setup.py
from setuptools import find_packages, setup from shorty.version import __VERSION__ dependencies=[ 'django', 'django-autoconfig', 'django-nuit', ] test_dependencies=[ 'django-setuptest', ] setup( name='djshorty', version=__VERSION__, description='A Django URL shortening app', author='Ben Cardy', author_email='[email protected]', packages=find_packages(), install_requires=dependencies, # To run tests via python setup.py test tests_require=test_dependencies, test_suite='setuptest.setuptest.SetupTestSuite', include_package_data=True, )
from setuptools import find_packages, setup from shorty.version import __VERSION__ dependencies=[ 'django', 'django-autoconfig', 'django-nuit >= 1.0.0, < 2.0.0', ] test_dependencies=[ 'django-setuptest', ] setup( name='djshorty', version=__VERSION__, description='A Django URL shortening app', author='Ben Cardy', author_email='[email protected]', packages=find_packages(), install_requires=dependencies, # To run tests via python setup.py test tests_require=test_dependencies, test_suite='setuptest.setuptest.SetupTestSuite', include_package_data=True, )
Fix version dependency on nuit
Fix version dependency on nuit
Python
apache-2.0
benbacardi/djshorty,benbacardi/djshorty,ocadotechnology/djshorty,ocadotechnology/djshorty,ocadotechnology/djshorty,benbacardi/djshorty
python
## Code Before: from setuptools import find_packages, setup from shorty.version import __VERSION__ dependencies=[ 'django', 'django-autoconfig', 'django-nuit', ] test_dependencies=[ 'django-setuptest', ] setup( name='djshorty', version=__VERSION__, description='A Django URL shortening app', author='Ben Cardy', author_email='[email protected]', packages=find_packages(), install_requires=dependencies, # To run tests via python setup.py test tests_require=test_dependencies, test_suite='setuptest.setuptest.SetupTestSuite', include_package_data=True, ) ## Instruction: Fix version dependency on nuit ## Code After: from setuptools import find_packages, setup from shorty.version import __VERSION__ dependencies=[ 'django', 'django-autoconfig', 'django-nuit >= 1.0.0, < 2.0.0', ] test_dependencies=[ 'django-setuptest', ] setup( name='djshorty', version=__VERSION__, description='A Django URL shortening app', author='Ben Cardy', author_email='[email protected]', packages=find_packages(), install_requires=dependencies, # To run tests via python setup.py test tests_require=test_dependencies, test_suite='setuptest.setuptest.SetupTestSuite', include_package_data=True, )
# ... existing code ... dependencies=[ 'django', 'django-autoconfig', 'django-nuit >= 1.0.0, < 2.0.0', ] test_dependencies=[ 'django-setuptest', # ... rest of the code ...
aa83f506e7682b1ee26f118f7cb19db53249288e
mobile/src/main/java/com/alexstyl/specialdates/addevent/bottomsheet/IntentResolver.java
mobile/src/main/java/com/alexstyl/specialdates/addevent/bottomsheet/IntentResolver.java
package com.alexstyl.specialdates.addevent.bottomsheet; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.graphics.drawable.Drawable; import java.util.ArrayList; import java.util.List; final public class IntentResolver { private final PackageManager packageManager; public IntentResolver(PackageManager packageManager) { this.packageManager = packageManager; } List<IntentOptionViewModel> createViewModelsFor(Intent intent) { List<ResolveInfo> resolveInfos = packageManager.queryIntentActivities(intent, 0); List<IntentOptionViewModel> viewModels = new ArrayList<>(resolveInfos.size()); for (ResolveInfo resolveInfo : resolveInfos) { Drawable icon = resolveInfo.loadIcon(packageManager); String label = String.valueOf(resolveInfo.loadLabel(packageManager)); viewModels.add(new IntentOptionViewModel(icon, label, intent)); } return viewModels; } }
package com.alexstyl.specialdates.addevent.bottomsheet; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.graphics.drawable.Drawable; import java.util.ArrayList; import java.util.List; final public class IntentResolver { private final PackageManager packageManager; public IntentResolver(PackageManager packageManager) { this.packageManager = packageManager; } List<IntentOptionViewModel> createViewModelsFor(Intent intent) { List<ResolveInfo> resolveInfos = packageManager.queryIntentActivities(intent, 0); List<IntentOptionViewModel> viewModels = new ArrayList<>(resolveInfos.size()); for (ResolveInfo resolveInfo : resolveInfos) { Drawable icon = resolveInfo.loadIcon(packageManager); String label = String.valueOf(resolveInfo.loadLabel(packageManager)); Intent launchingIntent = new Intent(intent.getAction()); launchingIntent.setClassName(resolveInfo.activityInfo.packageName, resolveInfo.activityInfo.name); viewModels.add(new IntentOptionViewModel(icon, label, launchingIntent)); } return viewModels; } }
Create a new intent for each launching intent
Create a new intent for each launching intent
Java
mit
alexstyl/Memento-Calendar,auricgoldfinger/Memento-Namedays,alexstyl/Memento-Calendar,alexstyl/Memento-Calendar,alexstyl/Memento-Namedays,auricgoldfinger/Memento-Namedays,alexstyl/Memento-Namedays
java
## Code Before: package com.alexstyl.specialdates.addevent.bottomsheet; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.graphics.drawable.Drawable; import java.util.ArrayList; import java.util.List; final public class IntentResolver { private final PackageManager packageManager; public IntentResolver(PackageManager packageManager) { this.packageManager = packageManager; } List<IntentOptionViewModel> createViewModelsFor(Intent intent) { List<ResolveInfo> resolveInfos = packageManager.queryIntentActivities(intent, 0); List<IntentOptionViewModel> viewModels = new ArrayList<>(resolveInfos.size()); for (ResolveInfo resolveInfo : resolveInfos) { Drawable icon = resolveInfo.loadIcon(packageManager); String label = String.valueOf(resolveInfo.loadLabel(packageManager)); viewModels.add(new IntentOptionViewModel(icon, label, intent)); } return viewModels; } } ## Instruction: Create a new intent for each launching intent ## Code After: package com.alexstyl.specialdates.addevent.bottomsheet; import android.content.Intent; import android.content.pm.PackageManager; import android.content.pm.ResolveInfo; import android.graphics.drawable.Drawable; import java.util.ArrayList; import java.util.List; final public class IntentResolver { private final PackageManager packageManager; public IntentResolver(PackageManager packageManager) { this.packageManager = packageManager; } List<IntentOptionViewModel> createViewModelsFor(Intent intent) { List<ResolveInfo> resolveInfos = packageManager.queryIntentActivities(intent, 0); List<IntentOptionViewModel> viewModels = new ArrayList<>(resolveInfos.size()); for (ResolveInfo resolveInfo : resolveInfos) { Drawable icon = resolveInfo.loadIcon(packageManager); String label = String.valueOf(resolveInfo.loadLabel(packageManager)); Intent launchingIntent = new Intent(intent.getAction()); launchingIntent.setClassName(resolveInfo.activityInfo.packageName, 
resolveInfo.activityInfo.name); viewModels.add(new IntentOptionViewModel(icon, label, launchingIntent)); } return viewModels; } }
... for (ResolveInfo resolveInfo : resolveInfos) { Drawable icon = resolveInfo.loadIcon(packageManager); String label = String.valueOf(resolveInfo.loadLabel(packageManager)); Intent launchingIntent = new Intent(intent.getAction()); launchingIntent.setClassName(resolveInfo.activityInfo.packageName, resolveInfo.activityInfo.name); viewModels.add(new IntentOptionViewModel(icon, label, launchingIntent)); } return viewModels; } ...
cb39939355915d400a9ea0f0bee5e6fcc5dfeb37
miner.h
miner.h
struct work { uint8_t *midstate; uint8_t *data; }; #ifndef MAX #define MAX(a,b) ((a) < (b) ? (b) : (a)) #endif #ifndef MIN #define MIN(a,b) ((a) > (b) ? (b) : (a)) #endif typedef enum { false = 0, true = 1 } bool; #endif
struct work { uint8_t *midstate; uint8_t *data; }; #ifndef MAX #define MAX(a,b) ((a) < (b) ? (b) : (a)) #endif #ifndef MIN #define MIN(a,b) ((a) > (b) ? (b) : (a)) #endif #endif
Use stdbool.h for bool definition.
Use stdbool.h for bool definition. Our own collides with applications using strbool.h
C
mit
KnCMiner/knc-asic,KnCMiner/knc-asic,KnCMiner/knc-asic
c
## Code Before: struct work { uint8_t *midstate; uint8_t *data; }; #ifndef MAX #define MAX(a,b) ((a) < (b) ? (b) : (a)) #endif #ifndef MIN #define MIN(a,b) ((a) > (b) ? (b) : (a)) #endif typedef enum { false = 0, true = 1 } bool; #endif ## Instruction: Use stdbool.h for bool definition. Our own collides with applications using strbool.h ## Code After: struct work { uint8_t *midstate; uint8_t *data; }; #ifndef MAX #define MAX(a,b) ((a) < (b) ? (b) : (a)) #endif #ifndef MIN #define MIN(a,b) ((a) > (b) ? (b) : (a)) #endif #endif
// ... existing code ... #define MIN(a,b) ((a) > (b) ? (b) : (a)) #endif #endif // ... rest of the code ...
d66355e4758b37be39d17d681ede1dbbd6b9b311
setmagic/admin.py
setmagic/admin.py
from django import forms from django.contrib import admin from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
from django import forms from django.contrib import admin from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: if isinstance(custom_field, str): module, name = custom_field.rsplit('.', 1) custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
Use importlib to load custom fields by str
Use importlib to load custom fields by str
Python
mit
7ws/django-setmagic
python
## Code Before: from django import forms from django.contrib import admin from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin) ## Instruction: Use importlib to load custom fields by str ## Code After: from django import forms from django.contrib import admin from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting _denied = lambda *args: False class SetMagicAdmin(admin.ModelAdmin): list_display = 'label', 'current_value', list_editable = 'current_value', list_display_links = None has_add_permission = _denied has_delete_permission = _denied # Make all fields read-only at the change form def get_readonly_fields(self, *args, **kwargs): return self.opts.get_all_field_names() def changelist_view(self, *args, **kwargs): settings._sync() return super(SetMagicAdmin, self).changelist_view(*args, **kwargs) def get_queryset(self, request): return 
Setting.objects.filter(name__in=settings.defs) def get_changelist_form(self, *args, **kwargs): class Form(forms.ModelForm): class Meta: fields = self.list_editable def __init__(self, *args, **kwargs): super(Form, self).__init__(*args, **kwargs) # Do nothing for empty forms if not self.instance.pk: return # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: if isinstance(custom_field, str): module, name = custom_field.rsplit('.', 1) custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form admin.site.register(Setting, SetMagicAdmin)
# ... existing code ... from django import forms from django.contrib import admin from django.utils.importlib import import_module from setmagic import settings from setmagic.models import Setting # ... modified code ... # Set a custom field custom_field = settings.defs[self.instance.name].get('field') if custom_field: if isinstance(custom_field, str): module, name = custom_field.rsplit('.', 1) custom_field = getattr(import_module(module), name)() self.fields['current_value'] = custom_field return Form # ... rest of the code ...
596a29505351ec0e497cbe114a6e4d57d7cbada6
backoff/__init__.py
backoff/__init__.py
from backoff._decorator import on_predicate, on_exception from backoff._jitter import full_jitter, random_jitter from backoff._wait_gen import constant, expo, fibo, runtime __all__ = [ 'on_predicate', 'on_exception', 'constant', 'expo', 'fibo', 'runtime', 'full_jitter', 'random_jitter', ] __version__ = '2.1.0'
import importlib.metadata from backoff._decorator import on_exception, on_predicate from backoff._jitter import full_jitter, random_jitter from backoff._wait_gen import constant, expo, fibo, runtime __all__ = [ 'on_predicate', 'on_exception', 'constant', 'expo', 'fibo', 'runtime', 'full_jitter', 'random_jitter', ] __version__ = importlib.metadata.version("backoff")
Use importlib.metadata to set __version__
Use importlib.metadata to set __version__ This way we don't have to remember to update the version in two places every release. The version will only need to be set in pyproject.toml
Python
mit
litl/backoff
python
## Code Before: from backoff._decorator import on_predicate, on_exception from backoff._jitter import full_jitter, random_jitter from backoff._wait_gen import constant, expo, fibo, runtime __all__ = [ 'on_predicate', 'on_exception', 'constant', 'expo', 'fibo', 'runtime', 'full_jitter', 'random_jitter', ] __version__ = '2.1.0' ## Instruction: Use importlib.metadata to set __version__ This way we don't have to remember to update the version in two places every release. The version will only need to be set in pyproject.toml ## Code After: import importlib.metadata from backoff._decorator import on_exception, on_predicate from backoff._jitter import full_jitter, random_jitter from backoff._wait_gen import constant, expo, fibo, runtime __all__ = [ 'on_predicate', 'on_exception', 'constant', 'expo', 'fibo', 'runtime', 'full_jitter', 'random_jitter', ] __version__ = importlib.metadata.version("backoff")
... import importlib.metadata from backoff._decorator import on_exception, on_predicate from backoff._jitter import full_jitter, random_jitter from backoff._wait_gen import constant, expo, fibo, runtime ... 'random_jitter', ] __version__ = importlib.metadata.version("backoff") ...
101a7c8e0e26089d6d1deb4e7728e4eb59274b74
app/main/forms.py
app/main/forms.py
from flask.ext.wtf import Form from wtforms import validators from dmutils.forms import StripWhitespaceStringField from .. import data_api_client class AdminEmailAddressValidator(object): def __init__(self, message=None): self.message = message def __call__(self, form, field): if not data_api_client.email_is_valid_for_admin_user(field.data): raise validators.StopValidation(self.message) class EmailAddressForm(Form): email_address = StripWhitespaceStringField('Email address', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class MoveUserForm(Form): user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class EmailDomainForm(Form): new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[ validators.DataRequired(message="The domain field can not be empty.") ])
from flask.ext.wtf import Form from wtforms import RadioField, validators from dmutils.forms import StripWhitespaceStringField from .. import data_api_client class AdminEmailAddressValidator(object): def __init__(self, message=None): self.message = message def __call__(self, form, field): if not data_api_client.email_is_valid_for_admin_user(field.data): raise validators.StopValidation(self.message) class EmailAddressForm(Form): email_address = StripWhitespaceStringField('Email address', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class MoveUserForm(Form): user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class EmailDomainForm(Form): new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[ validators.DataRequired(message="The domain field can not be empty.") ]) class InviteAdminForm(Form): role_choices = [ ('admin-ccs-category', 'Category'), ('admin-ccs-sourcing', 'Sourcing'), ('admin', 'Support'), ] email_address = StripWhitespaceStringField( 'Email address', validators=[ validators.DataRequired(message='You must provide an email address'), validators.Email(message='Please enter a valid email address'), AdminEmailAddressValidator(message='The email address must belong to an approved domain') ] ) role = RadioField( 'Permissions', validators=[validators.InputRequired(message='You must choose a permission')], choices=role_choices ) def __init__(self, *args, **kwargs): super(InviteAdminForm, self).__init__(*args, **kwargs) self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]
Add InviteAdminForm with email_address and role fields
Add InviteAdminForm with email_address and role fields
Python
mit
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
python
## Code Before: from flask.ext.wtf import Form from wtforms import validators from dmutils.forms import StripWhitespaceStringField from .. import data_api_client class AdminEmailAddressValidator(object): def __init__(self, message=None): self.message = message def __call__(self, form, field): if not data_api_client.email_is_valid_for_admin_user(field.data): raise validators.StopValidation(self.message) class EmailAddressForm(Form): email_address = StripWhitespaceStringField('Email address', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class MoveUserForm(Form): user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class EmailDomainForm(Form): new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[ validators.DataRequired(message="The domain field can not be empty.") ]) ## Instruction: Add InviteAdminForm with email_address and role fields ## Code After: from flask.ext.wtf import Form from wtforms import RadioField, validators from dmutils.forms import StripWhitespaceStringField from .. 
import data_api_client class AdminEmailAddressValidator(object): def __init__(self, message=None): self.message = message def __call__(self, form, field): if not data_api_client.email_is_valid_for_admin_user(field.data): raise validators.StopValidation(self.message) class EmailAddressForm(Form): email_address = StripWhitespaceStringField('Email address', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class MoveUserForm(Form): user_to_move_email_address = StripWhitespaceStringField('Move an existing user to this supplier', validators=[ validators.DataRequired(message="Email can not be empty"), validators.Email(message="Please enter a valid email address") ]) class EmailDomainForm(Form): new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[ validators.DataRequired(message="The domain field can not be empty.") ]) class InviteAdminForm(Form): role_choices = [ ('admin-ccs-category', 'Category'), ('admin-ccs-sourcing', 'Sourcing'), ('admin', 'Support'), ] email_address = StripWhitespaceStringField( 'Email address', validators=[ validators.DataRequired(message='You must provide an email address'), validators.Email(message='Please enter a valid email address'), AdminEmailAddressValidator(message='The email address must belong to an approved domain') ] ) role = RadioField( 'Permissions', validators=[validators.InputRequired(message='You must choose a permission')], choices=role_choices ) def __init__(self, *args, **kwargs): super(InviteAdminForm, self).__init__(*args, **kwargs) self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices]
# ... existing code ... from flask.ext.wtf import Form from wtforms import RadioField, validators from dmutils.forms import StripWhitespaceStringField # ... modified code ... new_buyer_domain = StripWhitespaceStringField('Add a buyer email domain', validators=[ validators.DataRequired(message="The domain field can not be empty.") ]) class InviteAdminForm(Form): role_choices = [ ('admin-ccs-category', 'Category'), ('admin-ccs-sourcing', 'Sourcing'), ('admin', 'Support'), ] email_address = StripWhitespaceStringField( 'Email address', validators=[ validators.DataRequired(message='You must provide an email address'), validators.Email(message='Please enter a valid email address'), AdminEmailAddressValidator(message='The email address must belong to an approved domain') ] ) role = RadioField( 'Permissions', validators=[validators.InputRequired(message='You must choose a permission')], choices=role_choices ) def __init__(self, *args, **kwargs): super(InviteAdminForm, self).__init__(*args, **kwargs) self.role.toolkit_macro_options = [{'value': i[0], 'label': i[1]} for i in self.role_choices] # ... rest of the code ...
938b137bf0d12a999ae823e3e33ab5825810cf26
tests/regression/01-cpa/10-posneg.c
tests/regression/01-cpa/10-posneg.c
int main() { int i,k,j; if (k == 5) { assert(k == 5); return 0; } assert(k != 5); // simple arithmetic i = k + 1; assert(i != 6); i = k - 1; assert(i != 4); i = k * 2; assert(i != 10); // UNKNOWN! k could be -2147483643; i = k / 2; assert(i != 2); // UNKNOWN! k could be 4 return 0; }
int main() { int i,k,j; if (k == 5) { assert(k == 5); return 0; } assert(k != 5); // simple arithmetic i = k + 1; assert(i != 6); i = k - 1; assert(i != 4); i = k * 3; // multiplication with odd numbers is injective assert(i != 15); i = k * 2; // multiplication with even numbers is not-injective assert(i != 10); // UNKNOWN! k could be -2147483643; i = k / 2; assert(i != 2); // UNKNOWN! k could be 4 return 0; }
Add assertion about result of multiplication to test
Add assertion about result of multiplication to test
C
mit
goblint/analyzer,goblint/analyzer,goblint/analyzer,goblint/analyzer,goblint/analyzer
c
## Code Before: int main() { int i,k,j; if (k == 5) { assert(k == 5); return 0; } assert(k != 5); // simple arithmetic i = k + 1; assert(i != 6); i = k - 1; assert(i != 4); i = k * 2; assert(i != 10); // UNKNOWN! k could be -2147483643; i = k / 2; assert(i != 2); // UNKNOWN! k could be 4 return 0; } ## Instruction: Add assertion about result of multiplication to test ## Code After: int main() { int i,k,j; if (k == 5) { assert(k == 5); return 0; } assert(k != 5); // simple arithmetic i = k + 1; assert(i != 6); i = k - 1; assert(i != 4); i = k * 3; // multiplication with odd numbers is injective assert(i != 15); i = k * 2; // multiplication with even numbers is not-injective assert(i != 10); // UNKNOWN! k could be -2147483643; i = k / 2; assert(i != 2); // UNKNOWN! k could be 4 return 0; }
// ... existing code ... assert(i != 6); i = k - 1; assert(i != 4); i = k * 3; // multiplication with odd numbers is injective assert(i != 15); i = k * 2; // multiplication with even numbers is not-injective assert(i != 10); // UNKNOWN! k could be -2147483643; i = k / 2; assert(i != 2); // UNKNOWN! k could be 4 // ... rest of the code ...
935657aa643de17c04ea15ddb8af9aee27a05516
setup.py
setup.py
"""Rachiopy setup script.""" from setuptools import find_packages, setup from datetime import datetime NOW = datetime.now().strftime("%m/%d/%Y%H%M%S") VERSION = f"1.0.0-dev{NOW}" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], )
"""Rachiopy setup script.""" from setuptools import find_packages, setup from datetime import datetime VERSION = "1.0.0" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], )
Set the final version number
Set the final version number
Python
mit
rfverbruggen/rachiopy
python
## Code Before: """Rachiopy setup script.""" from setuptools import find_packages, setup from datetime import datetime NOW = datetime.now().strftime("%m/%d/%Y%H%M%S") VERSION = f"1.0.0-dev{NOW}" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], ) ## Instruction: Set the final version number ## Code After: """Rachiopy setup script.""" from setuptools import find_packages, setup from datetime import datetime VERSION = "1.0.0" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" GITHUB_PATH = f"{GITHUB_USERNAME}/{GITHUB_REPOSITORY}" GITHUB_URL = f"https://github.com/{GITHUB_PATH}" DOWNLOAD_URL = f"{GITHUB_URL}/archive/{VERSION}.tar.gz" PROJECT_URLS = {"Bug Reports": f"{GITHUB_URL}/issues"} PACKAGES = find_packages(exclude=["tests", "tests.*"]) setup( name="RachioPy", version=VERSION, author="Robbert Verbruggen", author_email="[email protected]", packages=PACKAGES, install_requires=["requests"], url=GITHUB_URL, download_url=DOWNLOAD_URL, project_urls=PROJECT_URLS, license="MIT", description="A Python module for the Rachio API.", platforms="Cross Platform", classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: 
Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Topic :: Software Development", ], )
# ... existing code ... from setuptools import find_packages, setup from datetime import datetime VERSION = "1.0.0" GITHUB_USERNAME = "rfverbruggen" GITHUB_REPOSITORY = "rachiopy" # ... rest of the code ...
e8506331cfa5e14029e3de4ccb16c5e0267e85b3
manoseimas/votings/nodes.py
manoseimas/votings/nodes.py
from zope.component import adapts from zope.component import provideAdapter from sboard.nodes import CreateView from sboard.nodes import DetailsView from .forms import PolicyIssueForm from .interfaces import IVoting from .interfaces import IPolicyIssue class VotingView(DetailsView): adapts(IVoting) templates = { 'details': 'votings/voting_details.html', } provideAdapter(VotingView) class CreatePolicyIssueView(CreateView): adapts(object, IPolicyIssue) form = PolicyIssueForm provideAdapter(CreatePolicyIssueView, name="create")
from zope.component import adapts from zope.component import provideAdapter from sboard.nodes import CreateView from sboard.nodes import DetailsView from sboard.nodes import TagListView from .forms import PolicyIssueForm from .interfaces import IVoting from .interfaces import IPolicyIssue class VotingView(DetailsView): adapts(IVoting) templates = { 'details': 'votings/voting_details.html', } provideAdapter(VotingView) class CreatePolicyIssueView(CreateView): adapts(object, IPolicyIssue) form = PolicyIssueForm provideAdapter(CreatePolicyIssueView, name="create") provideAdapter(TagListView, (IPolicyIssue,))
Use TagListView for IPolicyIssue as default view.
Use TagListView for IPolicyIssue as default view.
Python
agpl-3.0
ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt,ManoSeimas/manoseimas.lt
python
## Code Before: from zope.component import adapts from zope.component import provideAdapter from sboard.nodes import CreateView from sboard.nodes import DetailsView from .forms import PolicyIssueForm from .interfaces import IVoting from .interfaces import IPolicyIssue class VotingView(DetailsView): adapts(IVoting) templates = { 'details': 'votings/voting_details.html', } provideAdapter(VotingView) class CreatePolicyIssueView(CreateView): adapts(object, IPolicyIssue) form = PolicyIssueForm provideAdapter(CreatePolicyIssueView, name="create") ## Instruction: Use TagListView for IPolicyIssue as default view. ## Code After: from zope.component import adapts from zope.component import provideAdapter from sboard.nodes import CreateView from sboard.nodes import DetailsView from sboard.nodes import TagListView from .forms import PolicyIssueForm from .interfaces import IVoting from .interfaces import IPolicyIssue class VotingView(DetailsView): adapts(IVoting) templates = { 'details': 'votings/voting_details.html', } provideAdapter(VotingView) class CreatePolicyIssueView(CreateView): adapts(object, IPolicyIssue) form = PolicyIssueForm provideAdapter(CreatePolicyIssueView, name="create") provideAdapter(TagListView, (IPolicyIssue,))
# ... existing code ... from sboard.nodes import CreateView from sboard.nodes import DetailsView from sboard.nodes import TagListView from .forms import PolicyIssueForm from .interfaces import IVoting # ... modified code ... form = PolicyIssueForm provideAdapter(CreatePolicyIssueView, name="create") provideAdapter(TagListView, (IPolicyIssue,)) # ... rest of the code ...
726a982145a5da2530056e2012853848b07d0460
django_snooze/utils.py
django_snooze/utils.py
import json from django.http import HttpResponse def json_response(content, status_code=200, headers={}): """ Simple function to serialise content and return a valid HTTP response. It takes three parameters: - content (required): the content to serialise. - status_code (default 200): The HTTP status code to use. - headers (default None): The headers to add to the response. """ response = HttpResponse() response.write(json.dumps(content)) response.status_code = status_code if headers: for key, value in headers.items: response[key] = value return response
import json from django.http import HttpResponse def json_response(content, status_code=200, headers={}): """ Simple function to serialise content and return a valid HTTP response. It takes three parameters: - content (required): the content to serialise. - status_code (default 200): The HTTP status code to use. - headers (default None): The headers to add to the response. """ response = HttpResponse() response.write(json.dumps(content)) response.status_code = status_code response['Content-Type'] = 'application/json; charset=utf-8' if headers: for key, value in headers.items: response[key] = value return response
Fix the Content-Type header of the json_response
Fix the Content-Type header of the json_response Seems I forgot to add the correct Content-Type header to the json_response utility. This has now been fixed.
Python
bsd-3-clause
ainmosni/django-snooze,ainmosni/django-snooze
python
## Code Before: import json from django.http import HttpResponse def json_response(content, status_code=200, headers={}): """ Simple function to serialise content and return a valid HTTP response. It takes three parameters: - content (required): the content to serialise. - status_code (default 200): The HTTP status code to use. - headers (default None): The headers to add to the response. """ response = HttpResponse() response.write(json.dumps(content)) response.status_code = status_code if headers: for key, value in headers.items: response[key] = value return response ## Instruction: Fix the Content-Type header of the json_response Seems I forgot to add the correct Content-Type header to the json_response utility. This has now been fixed. ## Code After: import json from django.http import HttpResponse def json_response(content, status_code=200, headers={}): """ Simple function to serialise content and return a valid HTTP response. It takes three parameters: - content (required): the content to serialise. - status_code (default 200): The HTTP status code to use. - headers (default None): The headers to add to the response. """ response = HttpResponse() response.write(json.dumps(content)) response.status_code = status_code response['Content-Type'] = 'application/json; charset=utf-8' if headers: for key, value in headers.items: response[key] = value return response
... import json from django.http import HttpResponse def json_response(content, status_code=200, headers={}): """ ... response = HttpResponse() response.write(json.dumps(content)) response.status_code = status_code response['Content-Type'] = 'application/json; charset=utf-8' if headers: for key, value in headers.items: response[key] = value ...
6a1846c91a5829d0b41ca3f81f797e9f4aa26d6e
misura/canon/plugin/__init__.py
misura/canon/plugin/__init__.py
"""Plugin utilities""" from domains import NavigatorDomain, navigator_domains, node, nodes from dataimport import Converter, create_tree, create_dataset, search_registry, get_converter, convert_file, data_importers # List of functions which will be executed to update confdb and extend its options clientconf_update_functions = [] # Mapping of instrument:DefaultPlotPlugin names default_plot_plugins = {} # Mapping of instrument: plotting rule generating function default_plot_rules = {} load_rules = []
"""Plugin utilities""" from domains import NavigatorDomain, navigator_domains, node, nodes from dataimport import Converter, create_tree, create_dataset, search_registry, get_converter, convert_file, data_importers # List of functions which will be executed to update confdb and extend its options clientconf_update_functions = [] # Mapping of instrument:DefaultPlotPlugin names default_plot_plugins = {} # Mapping of instrument: plotting rule generating function default_plot_rules = {}
Remove obsolete load_rules definition, FLTD-196
Remove obsolete load_rules definition, FLTD-196
Python
mit
tainstr/misura.canon,tainstr/misura.canon
python
## Code Before: """Plugin utilities""" from domains import NavigatorDomain, navigator_domains, node, nodes from dataimport import Converter, create_tree, create_dataset, search_registry, get_converter, convert_file, data_importers # List of functions which will be executed to update confdb and extend its options clientconf_update_functions = [] # Mapping of instrument:DefaultPlotPlugin names default_plot_plugins = {} # Mapping of instrument: plotting rule generating function default_plot_rules = {} load_rules = [] ## Instruction: Remove obsolete load_rules definition, FLTD-196 ## Code After: """Plugin utilities""" from domains import NavigatorDomain, navigator_domains, node, nodes from dataimport import Converter, create_tree, create_dataset, search_registry, get_converter, convert_file, data_importers # List of functions which will be executed to update confdb and extend its options clientconf_update_functions = [] # Mapping of instrument:DefaultPlotPlugin names default_plot_plugins = {} # Mapping of instrument: plotting rule generating function default_plot_rules = {}
# ... existing code ... # Mapping of instrument: plotting rule generating function default_plot_rules = {} # ... rest of the code ...
6034265dfdfb2a7e1e4881076cc0f011ff0e639d
netbox/extras/migrations/0022_custom_links.py
netbox/extras/migrations/0022_custom_links.py
from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), ('extras', '0021_add_color_comments_changelog_to_tag'), ] operations = [ migrations.CreateModel( name='CustomLink', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)), ('name', models.CharField(max_length=100, unique=True)), ('text', models.CharField(max_length=200)), ('url', models.CharField(max_length=200)), ('weight', models.PositiveSmallIntegerField(default=100)), ('group_name', models.CharField(blank=True, max_length=50)), ('button_class', models.CharField(default='default', max_length=30)), ('new_window', models.BooleanField()), ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ], options={ 'ordering': ['group_name', 'weight', 'name'], }, ), ]
from django.db import migrations, models import django.db.models.deletion import extras.models class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), ('extras', '0021_add_color_comments_changelog_to_tag'), ] operations = [ migrations.CreateModel( name='CustomLink', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)), ('name', models.CharField(max_length=100, unique=True)), ('text', models.CharField(max_length=200)), ('url', models.CharField(max_length=200)), ('weight', models.PositiveSmallIntegerField(default=100)), ('group_name', models.CharField(blank=True, max_length=50)), ('button_class', models.CharField(default='default', max_length=30)), ('new_window', models.BooleanField()), ('content_type', models.ForeignKey(limit_choices_to=extras.models.get_custom_link_models, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ], options={ 'ordering': ['group_name', 'weight', 'name'], }, ), ]
Add limit_choices_to to CustomLink.content_type field
Add limit_choices_to to CustomLink.content_type field
Python
apache-2.0
lampwins/netbox,lampwins/netbox,digitalocean/netbox,digitalocean/netbox,digitalocean/netbox,lampwins/netbox,lampwins/netbox,digitalocean/netbox
python
## Code Before: from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), ('extras', '0021_add_color_comments_changelog_to_tag'), ] operations = [ migrations.CreateModel( name='CustomLink', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)), ('name', models.CharField(max_length=100, unique=True)), ('text', models.CharField(max_length=200)), ('url', models.CharField(max_length=200)), ('weight', models.PositiveSmallIntegerField(default=100)), ('group_name', models.CharField(blank=True, max_length=50)), ('button_class', models.CharField(default='default', max_length=30)), ('new_window', models.BooleanField()), ('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ], options={ 'ordering': ['group_name', 'weight', 'name'], }, ), ] ## Instruction: Add limit_choices_to to CustomLink.content_type field ## Code After: from django.db import migrations, models import django.db.models.deletion import extras.models class Migration(migrations.Migration): dependencies = [ ('contenttypes', '0002_remove_content_type_name'), ('extras', '0021_add_color_comments_changelog_to_tag'), ] operations = [ migrations.CreateModel( name='CustomLink', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False)), ('name', models.CharField(max_length=100, unique=True)), ('text', models.CharField(max_length=200)), ('url', models.CharField(max_length=200)), ('weight', models.PositiveSmallIntegerField(default=100)), ('group_name', models.CharField(blank=True, max_length=50)), ('button_class', models.CharField(default='default', max_length=30)), ('new_window', models.BooleanField()), ('content_type', models.ForeignKey(limit_choices_to=extras.models.get_custom_link_models, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ], 
options={ 'ordering': ['group_name', 'weight', 'name'], }, ), ]
// ... existing code ... from django.db import migrations, models import django.db.models.deletion import extras.models class Migration(migrations.Migration): // ... modified code ... ('group_name', models.CharField(blank=True, max_length=50)), ('button_class', models.CharField(default='default', max_length=30)), ('new_window', models.BooleanField()), ('content_type', models.ForeignKey(limit_choices_to=extras.models.get_custom_link_models, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')), ], options={ 'ordering': ['group_name', 'weight', 'name'], // ... rest of the code ...
17ac329783bce0cb88d92659cf58a3ea476c66ef
scripts/sound_output_test.py
scripts/sound_output_test.py
import pyaudio import wave import time import sys import numpy as np if len(sys.argv) < 2: print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0]) sys.exit(-1) wf = wave.open(sys.argv[1], 'rb') p = pyaudio.PyAudio() DEVICE_ID=2 def callback(in_data, frame_count, time_info, status): data = wf.readframes(frame_count) # npdata = np.frombuffer(data, dtype=np.int16) return (data, pyaudio.paContinue) print("Device parameters: {}".format(p.get_default_output_device_info())) stream = p.open(format=p.get_format_from_width(wf.getsampwidth()), channels=wf.getnchannels(), rate=wf.getframerate(), output_device_index=DEVICE_ID, output=True, stream_callback=callback) stream.start_stream() while stream.is_active(): time.sleep(0.1) stream.stop_stream() stream.close() wf.close() p.terminate()
import pyaudio import wave import time import sys import numpy as np if len(sys.argv) < 2: print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0]) sys.exit(-1) wf = wave.open(sys.argv[1], 'rb') p = pyaudio.PyAudio() n_bytes_to_test = 1024 * 2 * 6 DEVICE_ID=2 def callback(in_data, frame_count, time_info, status): data = wf.readframes(frame_count) # npdata = np.frombuffer(data, dtype=np.int16) # print("len(data): {}, frame_count: {}".format(len(data), frame_count)) if len(data) < n_bytes_to_test: wf.rewind() data = wf.readframes(frame_count) print("Rewinding") return (data, pyaudio.paContinue) print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID))) stream = p.open(format=p.get_format_from_width(wf.getsampwidth()), channels=wf.getnchannels(), rate=48000, output_device_index=DEVICE_ID, output=True, stream_callback=callback) stream.start_stream() while stream.is_active(): time.sleep(0.1) stream.stop_stream() stream.close() wf.close() p.terminate()
Add support for looping sample
Add support for looping sample
Python
bsd-2-clause
mfergie/human-hive
python
## Code Before: import pyaudio import wave import time import sys import numpy as np if len(sys.argv) < 2: print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0]) sys.exit(-1) wf = wave.open(sys.argv[1], 'rb') p = pyaudio.PyAudio() DEVICE_ID=2 def callback(in_data, frame_count, time_info, status): data = wf.readframes(frame_count) # npdata = np.frombuffer(data, dtype=np.int16) return (data, pyaudio.paContinue) print("Device parameters: {}".format(p.get_default_output_device_info())) stream = p.open(format=p.get_format_from_width(wf.getsampwidth()), channels=wf.getnchannels(), rate=wf.getframerate(), output_device_index=DEVICE_ID, output=True, stream_callback=callback) stream.start_stream() while stream.is_active(): time.sleep(0.1) stream.stop_stream() stream.close() wf.close() p.terminate() ## Instruction: Add support for looping sample ## Code After: import pyaudio import wave import time import sys import numpy as np if len(sys.argv) < 2: print("Plays a wave file.\n\nUsage: %s filename.wav" % sys.argv[0]) sys.exit(-1) wf = wave.open(sys.argv[1], 'rb') p = pyaudio.PyAudio() n_bytes_to_test = 1024 * 2 * 6 DEVICE_ID=2 def callback(in_data, frame_count, time_info, status): data = wf.readframes(frame_count) # npdata = np.frombuffer(data, dtype=np.int16) # print("len(data): {}, frame_count: {}".format(len(data), frame_count)) if len(data) < n_bytes_to_test: wf.rewind() data = wf.readframes(frame_count) print("Rewinding") return (data, pyaudio.paContinue) print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID))) stream = p.open(format=p.get_format_from_width(wf.getsampwidth()), channels=wf.getnchannels(), rate=48000, output_device_index=DEVICE_ID, output=True, stream_callback=callback) stream.start_stream() while stream.is_active(): time.sleep(0.1) stream.stop_stream() stream.close() wf.close() p.terminate()
// ... existing code ... p = pyaudio.PyAudio() n_bytes_to_test = 1024 * 2 * 6 DEVICE_ID=2 def callback(in_data, frame_count, time_info, status): data = wf.readframes(frame_count) # npdata = np.frombuffer(data, dtype=np.int16) # print("len(data): {}, frame_count: {}".format(len(data), frame_count)) if len(data) < n_bytes_to_test: wf.rewind() data = wf.readframes(frame_count) print("Rewinding") return (data, pyaudio.paContinue) print("Device parameters: {}".format(p.get_device_info_by_index(DEVICE_ID))) stream = p.open(format=p.get_format_from_width(wf.getsampwidth()), channels=wf.getnchannels(), rate=48000, output_device_index=DEVICE_ID, output=True, stream_callback=callback) // ... rest of the code ...
d8a7abd16e115e142299a4c1ed01b18b15a5b806
tests/test_hashring.py
tests/test_hashring.py
from hashring import HashRing def test_basic_ring(): hr = HashRing(range(3)) actual = hr.get_node('howdy') expected = 1 assert expected == actual
from hashring import HashRing


def test_basic_ring():
    """A ring over the integers 0..2 routes 'howdy' to node 1."""
    ring = HashRing(range(3))
    assert ring.get_node('howdy') == 1


def test_server_ring():
    """A ring over server-address strings routes 'my_key' stably."""
    memcache_servers = ['192.168.0.246:11212',
                        '192.168.0.247:11212',
                        '192.168.0.249:11212']
    ring = HashRing(memcache_servers)
    assert ring.get_node('my_key') == '192.168.0.247:11212'
Add additional test for strings
Add additional test for strings
Python
bsd-2-clause
goller/hashring
python
## Code Before: from hashring import HashRing def test_basic_ring(): hr = HashRing(range(3)) actual = hr.get_node('howdy') expected = 1 assert expected == actual ## Instruction: Add additional test for strings ## Code After: from hashring import HashRing def test_basic_ring(): hr = HashRing(range(3)) actual = hr.get_node('howdy') expected = 1 assert expected == actual def test_server_ring(): memcache_servers = ['192.168.0.246:11212', '192.168.0.247:11212', '192.168.0.249:11212'] ring = HashRing(memcache_servers) actual = ring.get_node('my_key') expected = '192.168.0.247:11212' assert expected == actual
... actual = hr.get_node('howdy') expected = 1 assert expected == actual def test_server_ring(): memcache_servers = ['192.168.0.246:11212', '192.168.0.247:11212', '192.168.0.249:11212'] ring = HashRing(memcache_servers) actual = ring.get_node('my_key') expected = '192.168.0.247:11212' assert expected == actual ...
b31fa660ff0b7b428b53bc7c5c3d097d763bfb1c
src/cz/muni/fi/rtsystems/robojagr/enums/Color.java
src/cz/muni/fi/rtsystems/robojagr/enums/Color.java
package cz.muni.fi.rtsystems.robojagr.enums; public enum Color { WHITE; public static Color getColor(int number) { return Color.WHITE; } }
package cz.muni.fi.rtsystems.robojagr.enums; public enum Color { WHITE, NOTWHITE; public static Color getColor(int number) { if (number == -1) { return Color.WHITE; } return NOTWHITE; } }
Add "NOTWHITE" color and simple conversion rule.
Add "NOTWHITE" color and simple conversion rule.
Java
mit
matobet/robo-jagr
java
## Code Before: package cz.muni.fi.rtsystems.robojagr.enums; public enum Color { WHITE; public static Color getColor(int number) { return Color.WHITE; } } ## Instruction: Add "NOTWHITE" color and simple conversion rule. ## Code After: package cz.muni.fi.rtsystems.robojagr.enums; public enum Color { WHITE, NOTWHITE; public static Color getColor(int number) { if (number == -1) { return Color.WHITE; } return NOTWHITE; } }
# ... existing code ... package cz.muni.fi.rtsystems.robojagr.enums; public enum Color { WHITE, NOTWHITE; public static Color getColor(int number) { if (number == -1) { return Color.WHITE; } return NOTWHITE; } } # ... rest of the code ...
22517e8658df76ebd8f9957f27384e1701d10b6e
plugin-dotnet-agent/src/main/kotlin/jetbrains/buildServer/agent/EnvironmentImpl.kt
plugin-dotnet-agent/src/main/kotlin/jetbrains/buildServer/agent/EnvironmentImpl.kt
package jetbrains.buildServer.agent import jetbrains.buildServer.agent.impl.OSTypeDetector import jetbrains.buildServer.util.OSType import jetbrains.buildServer.util.StringUtil import java.io.File class EnvironmentImpl(private val _fileSystemService: FileSystemService) : Environment { override fun tryGetVariable(name: String): String? { return System.getenv(name) } override val paths: Sequence<File> get() = tryGetVariable(PathEnvironmentVariableName)?.let { return StringUtil.splitHonorQuotes(it, File.pathSeparatorChar) .asSequence() .map { File(it) } .filter { _fileSystemService.isExists(it) } } ?: emptySequence() override val os: OSType get() = OSDetector.detect() ?: OSType.UNIX companion object { private const val PathEnvironmentVariableName = "PATH" private val OSDetector = OSTypeDetector() } }
package jetbrains.buildServer.agent

import jetbrains.buildServer.agent.impl.OSTypeDetector
import jetbrains.buildServer.util.OSType
import jetbrains.buildServer.util.StringUtil
import java.io.File

class EnvironmentImpl(private val _fileSystemService: FileSystemService) : Environment {
    override fun tryGetVariable(name: String): String? = System.getenv(name)

    /**
     * Existing directories listed on PATH, followed by the well-known
     * per-platform dotnet installation locations.
     */
    override val paths: Sequence<File>
        get() {
            val pathEntries = tryGetVariable(PathEnvironmentVariableName)
                ?.let { value ->
                    StringUtil.splitHonorQuotes(value, File.pathSeparatorChar)
                        .asSequence()
                        .map { entry -> File(entry) }
                        .filter { dir -> _fileSystemService.isExists(dir) }
                }
                ?: emptySequence()
            return pathEntries + getHintPaths()
        }

    override val os: OSType
        get() = OSDetector.detect() ?: OSType.UNIX

    /**
     * Provides well-known paths for tools on each platform.
     */
    private fun getHintPaths(): Sequence<File> = when (os) {
        OSType.MAC -> sequenceOf(File("/usr/local/share/dotnet"))
        OSType.UNIX -> sequenceOf(File("/usr/share/dotnet"))
        OSType.WINDOWS -> sequenceOf(File("C:\\Program Files\\dotnet"))
    }

    companion object {
        private const val PathEnvironmentVariableName = "PATH"
        private val OSDetector = OSTypeDetector()
    }
}
Make .NET CLI more robust in finding dotnet core installation
Make .NET CLI more robust in finding dotnet core installation Issue: TW-59135
Kotlin
apache-2.0
JetBrains/teamcity-dnx-plugin
kotlin
## Code Before: package jetbrains.buildServer.agent import jetbrains.buildServer.agent.impl.OSTypeDetector import jetbrains.buildServer.util.OSType import jetbrains.buildServer.util.StringUtil import java.io.File class EnvironmentImpl(private val _fileSystemService: FileSystemService) : Environment { override fun tryGetVariable(name: String): String? { return System.getenv(name) } override val paths: Sequence<File> get() = tryGetVariable(PathEnvironmentVariableName)?.let { return StringUtil.splitHonorQuotes(it, File.pathSeparatorChar) .asSequence() .map { File(it) } .filter { _fileSystemService.isExists(it) } } ?: emptySequence() override val os: OSType get() = OSDetector.detect() ?: OSType.UNIX companion object { private const val PathEnvironmentVariableName = "PATH" private val OSDetector = OSTypeDetector() } } ## Instruction: Make .NET CLI more robust in finding dotnet core installation Issue: TW-59135 ## Code After: package jetbrains.buildServer.agent import jetbrains.buildServer.agent.impl.OSTypeDetector import jetbrains.buildServer.util.OSType import jetbrains.buildServer.util.StringUtil import java.io.File class EnvironmentImpl(private val _fileSystemService: FileSystemService) : Environment { override fun tryGetVariable(name: String): String? { return System.getenv(name) } override val paths: Sequence<File> get() = (tryGetVariable(PathEnvironmentVariableName)?.let { path -> StringUtil.splitHonorQuotes(path, File.pathSeparatorChar) .asSequence() .map { File(it) } .filter { _fileSystemService.isExists(it) } } ?: emptySequence()) + getHintPaths() override val os: OSType get() = OSDetector.detect() ?: OSType.UNIX /** * Provides a well known paths for tools on each platform. 
*/ private fun getHintPaths(): Sequence<File> = sequence { when (os) { OSType.MAC -> yield(File("/usr/local/share/dotnet")) OSType.UNIX -> yield(File("/usr/share/dotnet")) OSType.WINDOWS -> yield(File("C:\\Program Files\\dotnet")) } } companion object { private const val PathEnvironmentVariableName = "PATH" private val OSDetector = OSTypeDetector() } }
# ... existing code ... override val paths: Sequence<File> get() = (tryGetVariable(PathEnvironmentVariableName)?.let { path -> StringUtil.splitHonorQuotes(path, File.pathSeparatorChar) .asSequence() .map { File(it) } .filter { _fileSystemService.isExists(it) } } ?: emptySequence()) + getHintPaths() override val os: OSType get() = OSDetector.detect() ?: OSType.UNIX /** * Provides a well known paths for tools on each platform. */ private fun getHintPaths(): Sequence<File> = sequence { when (os) { OSType.MAC -> yield(File("/usr/local/share/dotnet")) OSType.UNIX -> yield(File("/usr/share/dotnet")) OSType.WINDOWS -> yield(File("C:\\Program Files\\dotnet")) } } companion object { private const val PathEnvironmentVariableName = "PATH" # ... rest of the code ...
f8aa722b9b56ca543f73a40f22fd682a1c71fb4c
clowder_server/management/commands/send_alerts.py
clowder_server/management/commands/send_alerts.py
import datetime from django.core.management.base import BaseCommand, CommandError from clowder_server.emailer import send_alert from clowder_server.models import Alert class Command(BaseCommand): help = 'Checks and sends alerts' def handle(self, *args, **options): alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now) for alert in alerts: send_alert(request.user, alert.name) alert.notify_at = None alert.save()
import datetime

from django.core.management.base import BaseCommand, CommandError

from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
from clowder_server.models import Alert, Ping


class Command(BaseCommand):
    """Prunes each user's ping history, then emails any due alerts."""

    help = 'Checks and sends alerts'

    def handle(self, *args, **options):
        # Delete old pings, keeping at most 500 per user.
        # NOTE(review): no explicit order_by(), so "which 500 survive"
        # depends on the Ping model's default ordering — confirm it
        # orders newest-first.
        for user in ClowderUser.objects.all():
            pings = Ping.objects.filter(user=user)[:500]
            pings = list(pings)  # forces database hit so the slice is fixed
            # Was `Ping.objects.exclude(...)`: without re-filtering on
            # this user, every OTHER user's pings were deleted too.
            Ping.objects.filter(user=user).exclude(pk__in=pings).delete()

        # Send alerts that are due. `now` must be called — Django does
        # not invoke callables passed to queryset lookups.
        alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now())
        for alert in alerts:
            # Was `request.user`, which is undefined inside a management
            # command (NameError). NOTE(review): assumes Alert has a
            # `user` FK, as Ping does — confirm against the model.
            send_alert(alert.user, alert.name)
            alert.notify_at = None
            alert.save()
Delete old unused pings from users
Delete old unused pings from users
Python
agpl-3.0
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server
python
## Code Before: import datetime from django.core.management.base import BaseCommand, CommandError from clowder_server.emailer import send_alert from clowder_server.models import Alert class Command(BaseCommand): help = 'Checks and sends alerts' def handle(self, *args, **options): alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now) for alert in alerts: send_alert(request.user, alert.name) alert.notify_at = None alert.save() ## Instruction: Delete old unused pings from users ## Code After: import datetime from django.core.management.base import BaseCommand, CommandError from clowder_account.models import ClowderUser from clowder_server.emailer import send_alert from clowder_server.models import Alert, Ping class Command(BaseCommand): help = 'Checks and sends alerts' def handle(self, *args, **options): # delete old pings for user in ClowderUser.objects.all(): pings = Ping.objects.filter(user=user)[:500] pings = list(pings) # forces database hit Ping.objects.exclude(pk__in=pings).delete() # send alerts alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now) for alert in alerts: send_alert(request.user, alert.name) alert.notify_at = None alert.save()
// ... existing code ... from django.core.management.base import BaseCommand, CommandError from clowder_account.models import ClowderUser from clowder_server.emailer import send_alert from clowder_server.models import Alert, Ping class Command(BaseCommand): help = 'Checks and sends alerts' def handle(self, *args, **options): # delete old pings for user in ClowderUser.objects.all(): pings = Ping.objects.filter(user=user)[:500] pings = list(pings) # forces database hit Ping.objects.exclude(pk__in=pings).delete() # send alerts alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now) for alert in alerts: send_alert(request.user, alert.name) // ... rest of the code ...
e4297691f20ec4185ed4491ab41553df14a05a91
pycc/pycompat.py
pycc/pycompat.py
"""Compatibility helpers for Py2 and Py3.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import sys class VERSION(object): """Stand in for sys.version_info. The values from sys only have named parameters starting in PY27. This allows us to use named parameters for all versions of Python. """ major, minor, micro, releaselevel, serial = sys.version_info PY2 = VERSION.major == 2 PY25 = PY2 and VERSION.minor == 5 PY26 = PY2 and VERSION.minor == 6 PY27 = PY2 and VERSION.minor == 7 PY3 = not PY2 PY31 = PY3 and VERSION.minor == 1 PY32 = PY3 and VERSION.minor == 2 PY33 = PY3 and VERSION.minor == 3 py34 = PY3 and VERSION.minor == 4 # Provide a nice range function for py2. try: range = xrange except NameError: pass
"""Compatibility helpers for Py2 and Py3.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import sys class VERSION(object): """Stand in for sys.version_info. The values from sys only have named parameters starting in PY27. This allows us to use named parameters for all versions of Python. """ major, minor, micro, releaselevel, serial = sys.version_info PY2 = VERSION.major == 2 PY25 = PY2 and VERSION.minor == 5 PY26 = PY2 and VERSION.minor == 6 PY27 = PY2 and VERSION.minor == 7 PY3 = not PY2 PY31 = PY3 and VERSION.minor == 1 PY32 = PY3 and VERSION.minor == 2 PY33 = PY3 and VERSION.minor == 3 py34 = PY3 and VERSION.minor == 4 # Provide a nice range function for py2. try: range = xrange except NameError: pass # Provide a long type for py3. try: long = long except NameError: long = int
Add a long type backfill for PY3 compat
Add a long type backfill for PY3 compat PY3 combined the long and int types which makes some compiler operations difficult. Adding a backfill to help with PY2/PY3 compat. Signed-off-by: Kevin Conway <[email protected]>
Python
apache-2.0
kevinconway/pycc,kevinconway/pycc
python
## Code Before: """Compatibility helpers for Py2 and Py3.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import sys class VERSION(object): """Stand in for sys.version_info. The values from sys only have named parameters starting in PY27. This allows us to use named parameters for all versions of Python. """ major, minor, micro, releaselevel, serial = sys.version_info PY2 = VERSION.major == 2 PY25 = PY2 and VERSION.minor == 5 PY26 = PY2 and VERSION.minor == 6 PY27 = PY2 and VERSION.minor == 7 PY3 = not PY2 PY31 = PY3 and VERSION.minor == 1 PY32 = PY3 and VERSION.minor == 2 PY33 = PY3 and VERSION.minor == 3 py34 = PY3 and VERSION.minor == 4 # Provide a nice range function for py2. try: range = xrange except NameError: pass ## Instruction: Add a long type backfill for PY3 compat PY3 combined the long and int types which makes some compiler operations difficult. Adding a backfill to help with PY2/PY3 compat. Signed-off-by: Kevin Conway <[email protected]> ## Code After: """Compatibility helpers for Py2 and Py3.""" from __future__ import division from __future__ import absolute_import from __future__ import print_function from __future__ import unicode_literals import sys class VERSION(object): """Stand in for sys.version_info. The values from sys only have named parameters starting in PY27. This allows us to use named parameters for all versions of Python. """ major, minor, micro, releaselevel, serial = sys.version_info PY2 = VERSION.major == 2 PY25 = PY2 and VERSION.minor == 5 PY26 = PY2 and VERSION.minor == 6 PY27 = PY2 and VERSION.minor == 7 PY3 = not PY2 PY31 = PY3 and VERSION.minor == 1 PY32 = PY3 and VERSION.minor == 2 PY33 = PY3 and VERSION.minor == 3 py34 = PY3 and VERSION.minor == 4 # Provide a nice range function for py2. try: range = xrange except NameError: pass # Provide a long type for py3. try: long = long except NameError: long = int
... range = xrange except NameError: pass # Provide a long type for py3. try: long = long except NameError: long = int ...
5a1edb15cac470f392ccb4447b81cc99e8af2a68
robinette/server.py
robinette/server.py
from irc import irc from xmlrpc.server import AsyncXMLRPCServer if __name__ == '__main__': server = AsyncXMLRPCServer(('localhost', 8000), allow_none=True) server.add_handler(irc) try: server.serve_forever() except KeyboardInterrupt: print 'Exiting'
from irc import irc from xmlrpc.server import AsyncXMLRPCServer if __name__ == '__main__': server = AsyncXMLRPCServer(('localhost', 8000), allow_none=True) server.add_handler(irc) print 'Running on %s:%s' % map(str, server.server_address) try: server.serve_forever() except KeyboardInterrupt: print 'Exiting'
Print where we're running at
Print where we're running at
Python
mit
mgracik/robinette
python
## Code Before: from irc import irc from xmlrpc.server import AsyncXMLRPCServer if __name__ == '__main__': server = AsyncXMLRPCServer(('localhost', 8000), allow_none=True) server.add_handler(irc) try: server.serve_forever() except KeyboardInterrupt: print 'Exiting' ## Instruction: Print where we're running at ## Code After: from irc import irc from xmlrpc.server import AsyncXMLRPCServer if __name__ == '__main__': server = AsyncXMLRPCServer(('localhost', 8000), allow_none=True) server.add_handler(irc) print 'Running on %s:%s' % map(str, server.server_address) try: server.serve_forever() except KeyboardInterrupt: print 'Exiting'
... if __name__ == '__main__': server = AsyncXMLRPCServer(('localhost', 8000), allow_none=True) server.add_handler(irc) print 'Running on %s:%s' % map(str, server.server_address) try: server.serve_forever() except KeyboardInterrupt: ...
621ca7bebfcc53026d8f98b9f6cfefe6ff25961b
src/util/constants.py
src/util/constants.py
SOS = '<S>' # end of sentence token EOS = '</S>'
SOS = chr(2) # end of sentence token EOS = chr(3)
Use separate characters for SOS and EOS
Use separate characters for SOS and EOS
Python
mit
milankinen/c2w2c,milankinen/c2w2c
python
## Code Before: SOS = '<S>' # end of sentence token EOS = '</S>' ## Instruction: Use separate characters for SOS and EOS ## Code After: SOS = chr(2) # end of sentence token EOS = chr(3)
// ... existing code ... SOS = chr(2) # end of sentence token EOS = chr(3) // ... rest of the code ...
16c5c9e89a6cf565070ab58d55a7796ea3183ced
coltrane/managers.py
coltrane/managers.py
from comment_utils.managers import CommentedObjectManager from django.db import models class LiveEntryManager(CommentedObjectManager): """ Custom manager for the Entry model, providing shortcuts for filtering by entry status. """ def featured(self): """ Returns a ``QuerySet`` of featured Entries. """ return self.filter(featured__exact=True) def get_query_set(self): """ Overrides the default ``QuerySet`` to only include Entries with a status of 'live'. """ return super(LiveEntryManager, self).get_query_set().filter(status__exact=1) def latest_featured(self): """ Returns the latest featured Entry if there is one, or ``None`` if there isn't. """ try: return self.featured()[0] except IndexError: return None
from comment_utils.managers import CommentedObjectManager
from django.db import models


class LiveEntryManager(CommentedObjectManager):
    """
    Custom manager for the Entry model, providing shortcuts for
    filtering by entry status.
    """

    def featured(self):
        """
        Returns a ``QuerySet`` of featured Entries.
        """
        return self.filter(featured__exact=True)

    def get_query_set(self):
        """
        Overrides the default ``QuerySet`` to only include Entries
        with a status of 'live'.
        """
        live_entries = super(LiveEntryManager, self).get_query_set()
        return live_entries.filter(status__exact=self.model.LIVE_STATUS)

    def latest_featured(self):
        """
        Returns the latest featured Entry if there is one, or
        ``None`` if there isn't.
        """
        try:
            latest = self.featured()[0]
        except IndexError:
            return None
        return latest
Add the support for the new module constants to the LiveEntryManager
Add the support for the new module constants to the LiveEntryManager git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95
Python
bsd-3-clause
mafix/coltrane-blog,clones/django-coltrane
python
## Code Before: from comment_utils.managers import CommentedObjectManager from django.db import models class LiveEntryManager(CommentedObjectManager): """ Custom manager for the Entry model, providing shortcuts for filtering by entry status. """ def featured(self): """ Returns a ``QuerySet`` of featured Entries. """ return self.filter(featured__exact=True) def get_query_set(self): """ Overrides the default ``QuerySet`` to only include Entries with a status of 'live'. """ return super(LiveEntryManager, self).get_query_set().filter(status__exact=1) def latest_featured(self): """ Returns the latest featured Entry if there is one, or ``None`` if there isn't. """ try: return self.featured()[0] except IndexError: return None ## Instruction: Add the support for the new module constants to the LiveEntryManager git-svn-id: 9770886a22906f523ce26b0ad22db0fc46e41232@71 5f8205a5-902a-0410-8b63-8f478ce83d95 ## Code After: from comment_utils.managers import CommentedObjectManager from django.db import models class LiveEntryManager(CommentedObjectManager): """ Custom manager for the Entry model, providing shortcuts for filtering by entry status. """ def featured(self): """ Returns a ``QuerySet`` of featured Entries. """ return self.filter(featured__exact=True) def get_query_set(self): """ Overrides the default ``QuerySet`` to only include Entries with a status of 'live'. """ return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS) def latest_featured(self): """ Returns the latest featured Entry if there is one, or ``None`` if there isn't. """ try: return self.featured()[0] except IndexError: return None
... with a status of 'live'. """ return super(LiveEntryManager, self).get_query_set().filter(status__exact=self.model.LIVE_STATUS) def latest_featured(self): """ ...
f52c8cc3938567a24ac6ea0a807654aa73caa871
pages/views.py
pages/views.py
from pages.models import Page, Language, Content from pages.utils import auto_render from django.contrib.admin.views.decorators import staff_member_required from django import forms from django.http import Http404 import settings @auto_render def details(request, page_id=None): template = None lang = Language.get_from_request(request) pages = Page.objects.filter(parent__isnull=True).order_by("tree_id") if len(pages) > 0: if page_id: try: current_page = Page.objects.get(id=int(page_id), status=1) except Page.DoesNotExist: raise Http404 else: # get the first root page current_page = pages[0] template = current_page.get_template() else: template = settings.DEFAULT_PAGE_TEMPLATE return template, locals()
from pages.models import Page, Language, Content
from pages.utils import auto_render
from django.contrib.admin.views.decorators import staff_member_required
from django import forms
from django.http import Http404
import settings

@auto_render
def details(request, page_id=None):
    """Render a CMS page: the one matching ``page_id``, or the first root page.

    Raises Http404 for an unknown or unpublished ``page_id``. CAUTION:
    every local variable defined here becomes part of the template
    context, because the view returns ``locals()``.
    """
    template = None
    lang = Language.get_from_request(request)
    # Root pages only, in tree order; the first one is the site's default.
    pages = Page.objects.filter(parent__isnull=True).order_by("tree_id")
    if len(pages) > 0:
        if page_id:
            try:
                # NOTE(review): status=1 presumably means "published" —
                # confirm against the Page model's status choices.
                current_page = Page.objects.get(id=int(page_id), status=1)
            except Page.DoesNotExist:
                raise Http404
        else:
            # get the first root page
            current_page = pages[0]
        template = current_page.get_template()
    else:
        # Empty database: still bind current_page so templates can test
        # it without a missing-variable error.
        current_page = None
        template = settings.DEFAULT_PAGE_TEMPLATE
    return template, locals()
Fix a bug with an empty database
Fix a bug with an empty database git-svn-id: 54fea250f97f2a4e12c6f7a610b8f07cb4c107b4@138 439a9e5f-3f3e-0410-bc46-71226ad0111b
Python
bsd-3-clause
pombredanne/django-page-cms-1,oliciv/django-page-cms,akaihola/django-page-cms,pombredanne/django-page-cms-1,akaihola/django-page-cms,remik/django-page-cms,pombredanne/django-page-cms-1,batiste/django-page-cms,oliciv/django-page-cms,remik/django-page-cms,batiste/django-page-cms,oliciv/django-page-cms,batiste/django-page-cms,remik/django-page-cms,remik/django-page-cms,akaihola/django-page-cms
python
## Code Before: from pages.models import Page, Language, Content from pages.utils import auto_render from django.contrib.admin.views.decorators import staff_member_required from django import forms from django.http import Http404 import settings @auto_render def details(request, page_id=None): template = None lang = Language.get_from_request(request) pages = Page.objects.filter(parent__isnull=True).order_by("tree_id") if len(pages) > 0: if page_id: try: current_page = Page.objects.get(id=int(page_id), status=1) except Page.DoesNotExist: raise Http404 else: # get the first root page current_page = pages[0] template = current_page.get_template() else: template = settings.DEFAULT_PAGE_TEMPLATE return template, locals() ## Instruction: Fix a bug with an empty database git-svn-id: 54fea250f97f2a4e12c6f7a610b8f07cb4c107b4@138 439a9e5f-3f3e-0410-bc46-71226ad0111b ## Code After: from pages.models import Page, Language, Content from pages.utils import auto_render from django.contrib.admin.views.decorators import staff_member_required from django import forms from django.http import Http404 import settings @auto_render def details(request, page_id=None): template = None lang = Language.get_from_request(request) pages = Page.objects.filter(parent__isnull=True).order_by("tree_id") if len(pages) > 0: if page_id: try: current_page = Page.objects.get(id=int(page_id), status=1) except Page.DoesNotExist: raise Http404 else: # get the first root page current_page = pages[0] template = current_page.get_template() else: current_page = None template = settings.DEFAULT_PAGE_TEMPLATE return template, locals()
# ... existing code ... current_page = pages[0] template = current_page.get_template() else: current_page = None template = settings.DEFAULT_PAGE_TEMPLATE return template, locals() # ... rest of the code ...
f59b249cf2b149f96833d9e1025a98819bf5f62a
sharepa/search.py
sharepa/search.py
import json import requests from elasticsearch_dsl import Search from elasticsearch_dsl.result import Response class ShareSearch(Search): BASE_URL = 'https://osf.io/api/v1/share/search/' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) def execute(self): return Response( self._query(self.to_dict()), callbacks=self._doc_type_map ) def count(self): d = self.to_dict() if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(count=True))['count'] def scan(self, size=100): count = 0 page = list(self[0:size].execute()) while(page): for hit in page: count += 1 yield hit page = list(self[count:count + size].execute()) def _query(self, data, params=None): return requests.post( self.BASE_URL, headers=self.HEADERS, data=json.dumps(self.to_dict()), params=params or self.PARAMS ).json() basic_search = ShareSearch() basic_search.aggs.bucket( 'sourceAgg', 'terms', field='_type', size=0, min_doc_count=0 )
import json

import requests
from elasticsearch_dsl import Search
from elasticsearch_dsl.result import Response


class ShareSearch(Search):
    """elasticsearch_dsl Search that talks to the SHARE HTTP endpoint."""

    BASE_URL = 'http://localhost:8000/api/search/abstractcreativework/_search'
    HEADERS = {'content-type': 'application/json'}
    PARAMS = dict(raw=True)

    def execute(self):
        """Run the query and wrap the raw JSON in a Response object."""
        return Response(
            self._query(self.to_dict()),
            callbacks=self._doc_type_map
        )

    def count(self):
        """Return the total number of hits, stripping any aggregations."""
        d = self.to_dict()
        if d.get('aggs'):
            del d['aggs']
        self = ShareSearch.from_dict(d)
        # size=0 asks ES for no documents, only the hit total.
        return self._query(self.to_dict(), params=dict(size=0))['hits']['total']

    def scan(self, size=100):
        """Yield every hit, fetching ``size`` results per request."""
        count = 0
        page = list(self[0:size].execute())
        while(page):
            for hit in page:
                count += 1
                yield hit
            page = list(self[count:count + size].execute())

    def _query(self, data, params=None):
        """POST ``data`` as the request body and return the decoded JSON.

        Fix: serialize the ``data`` argument callers pass in. Previously
        this re-serialized ``self.to_dict()`` and silently ignored
        ``data`` (behavior-compatible for current callers, which all
        pass ``self.to_dict()``).
        """
        return requests.post(
            self.BASE_URL,
            headers=self.HEADERS,
            data=json.dumps(data),
            params=params or self.PARAMS
        ).json()


basic_search = ShareSearch()

basic_search.aggs.bucket(
    'sourceAgg',
    'terms',
    field='_type',
    size=0,
    min_doc_count=0
)
Fix count param, use local es for now
Fix count param, use local es for now
Python
mit
CenterForOpenScience/sharepa,fabianvf/sharepa
python
## Code Before: import json import requests from elasticsearch_dsl import Search from elasticsearch_dsl.result import Response class ShareSearch(Search): BASE_URL = 'https://osf.io/api/v1/share/search/' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) def execute(self): return Response( self._query(self.to_dict()), callbacks=self._doc_type_map ) def count(self): d = self.to_dict() if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(count=True))['count'] def scan(self, size=100): count = 0 page = list(self[0:size].execute()) while(page): for hit in page: count += 1 yield hit page = list(self[count:count + size].execute()) def _query(self, data, params=None): return requests.post( self.BASE_URL, headers=self.HEADERS, data=json.dumps(self.to_dict()), params=params or self.PARAMS ).json() basic_search = ShareSearch() basic_search.aggs.bucket( 'sourceAgg', 'terms', field='_type', size=0, min_doc_count=0 ) ## Instruction: Fix count param, use local es for now ## Code After: import json import requests from elasticsearch_dsl import Search from elasticsearch_dsl.result import Response class ShareSearch(Search): BASE_URL = 'http://localhost:8000/api/search/abstractcreativework/_search' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) def execute(self): return Response( self._query(self.to_dict()), callbacks=self._doc_type_map ) def count(self): d = self.to_dict() if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(size=0))['hits']['total'] def scan(self, size=100): count = 0 page = list(self[0:size].execute()) while(page): for hit in page: count += 1 yield hit page = list(self[count:count + size].execute()) def _query(self, data, params=None): return requests.post( self.BASE_URL, headers=self.HEADERS, data=json.dumps(self.to_dict()), params=params or self.PARAMS ).json() basic_search = ShareSearch() 
basic_search.aggs.bucket( 'sourceAgg', 'terms', field='_type', size=0, min_doc_count=0 )
# ... existing code ... class ShareSearch(Search): BASE_URL = 'http://localhost:8000/api/search/abstractcreativework/_search' HEADERS = {'content-type': 'application/json'} PARAMS = dict(raw=True) # ... modified code ... if d.get('aggs'): del d['aggs'] self = ShareSearch.from_dict(d) return self._query(self.to_dict(), params=dict(size=0))['hits']['total'] def scan(self, size=100): count = 0 # ... rest of the code ...
fc25a6c4796ad008570974a682037bc575f15018
astroquery/lamda/tests/test_lamda.py
astroquery/lamda/tests/test_lamda.py
from ... import lamda def test_query(): Q = lamda.core.LAMDAQuery() Q.lamda_query(mol='co', query_type='erg_levels') Q.lamda_query(mol='co', query_type='rad_trans') Q.lamda_query(mol='co', query_type='coll_rates')
from ... import lamda


def test_query():
    # Listing the available molecules should not raise.
    lamda.print_mols()
    # Exercise each LAMDA table type for CO.
    lamda.query(mol='co', query_type='erg_levels')
    lamda.query(mol='co', query_type='rad_trans')
    # Collision rates additionally require choosing a collision partner.
    lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
Update tests for new style
Update tests for new style Also added test for printing molecule list and made the collisional rate test more complicated.
Python
bsd-3-clause
imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery
python
## Code Before: from ... import lamda def test_query(): Q = lamda.core.LAMDAQuery() Q.lamda_query(mol='co', query_type='erg_levels') Q.lamda_query(mol='co', query_type='rad_trans') Q.lamda_query(mol='co', query_type='coll_rates') ## Instruction: Update tests for new style Also added test for printing molecule list and made the collisional rate test more complicated. ## Code After: from ... import lamda def test_query(): lamda.print_mols() lamda.query(mol='co', query_type='erg_levels') lamda.query(mol='co', query_type='rad_trans') lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
# ... existing code ... from ... import lamda def test_query(): lamda.print_mols() lamda.query(mol='co', query_type='erg_levels') lamda.query(mol='co', query_type='rad_trans') lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1) # ... rest of the code ...
0722b517f5b5b9a84b7521b6b7d350cbc6537948
src/core/models.py
src/core/models.py
from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. """ presumed_type = super().db_type(connection) if presumed_type == 'integer': return 'bigint' return presumed_type
from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. Django's AutoField is actually an IntegerField (SQL integer field), but in some cases we are using bigint on PostgreSQL without Django knowing it. So we continue to trick Django here, swapping its field type detection, and just tells it to use bigint. :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
Add some explaination on BigForeignKey
Add some explaination on BigForeignKey
Python
mit
uranusjr/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016
python
## Code Before: from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. """ presumed_type = super().db_type(connection) if presumed_type == 'integer': return 'bigint' return presumed_type ## Instruction: Add some explaination on BigForeignKey ## Code After: from django.apps import apps from django.db import models class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. Django's AutoField is actually an IntegerField (SQL integer field), but in some cases we are using bigint on PostgreSQL without Django knowing it. So we continue to trick Django here, swapping its field type detection, and just tells it to use bigint. :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type
... from django.apps import apps from django.db import models ... class BigForeignKey(models.ForeignKey): def db_type(self, connection): """ Adds support for foreign keys to big integers as primary keys. Django's AutoField is actually an IntegerField (SQL integer field), but in some cases we are using bigint on PostgreSQL without Django knowing it. So we continue to trick Django here, swapping its field type detection, and just tells it to use bigint. :seealso: Migrations in the ``postgres`` app. """ presumed_type = super().db_type(connection) if apps.is_installed('postgres') and presumed_type == 'integer': return 'bigint' return presumed_type ...
23d5d0e0e77dc0b0816df51a8a1e42bc4069112b
rst2pdf/style2yaml.py
rst2pdf/style2yaml.py
import argparse import json import yaml from rst2pdf.dumpstyle import fixstyle from rst2pdf.rson import loads as rloads def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( 'paths', metavar='PATH', nargs='+', help='An RSON-formatted file to convert.', ) args = parser.parse_args() for path in args.paths: # read rson from a file with open(path, 'rb') as fh: style_data = fixstyle(rloads(fh.read())) # output the style as json, then parse that json_style = json.dumps(style_data) reparsed_style = json.loads(json_style) yaml_style = yaml.dump(reparsed_style, default_flow_style=None) print(yaml_style) if __name__ == '__main__': main()
import argparse import json import os import yaml from rst2pdf.dumpstyle import fixstyle from rst2pdf.rson import loads as rloads def main(): # set up the command, optional --save parameter, and a list of paths parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( '--save', action='store_true', help='Save .yaml version of the file (rather than output to stdout)', ) parser.add_argument( 'paths', metavar='PATH', nargs='+', help='An RSON-formatted file to convert.', ) args = parser.parse_args() # loop over the files for path in args.paths: # read rson from a file with open(path, 'rb') as fh: style_data = fixstyle(rloads(fh.read())) # output the style as json (already supported), then parse that json_style = json.dumps(style_data) reparsed_style = json.loads(json_style) yaml_style = yaml.dump(reparsed_style, default_flow_style=None) # output the yaml or save to a file if args.save: new_path = '.'.join((os.path.splitext(path)[0], 'yaml')) if os.path.exists(new_path): print("File " + new_path + " exists, cannot overwrite") else: print("Creating file " + new_path) with open(new_path, 'w') as file: file.write(yaml_style) else: print(yaml_style) if __name__ == '__main__': main()
Add save functionality to the conversion script
Add save functionality to the conversion script
Python
mit
rst2pdf/rst2pdf,rst2pdf/rst2pdf
python
## Code Before: import argparse import json import yaml from rst2pdf.dumpstyle import fixstyle from rst2pdf.rson import loads as rloads def main(): parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( 'paths', metavar='PATH', nargs='+', help='An RSON-formatted file to convert.', ) args = parser.parse_args() for path in args.paths: # read rson from a file with open(path, 'rb') as fh: style_data = fixstyle(rloads(fh.read())) # output the style as json, then parse that json_style = json.dumps(style_data) reparsed_style = json.loads(json_style) yaml_style = yaml.dump(reparsed_style, default_flow_style=None) print(yaml_style) if __name__ == '__main__': main() ## Instruction: Add save functionality to the conversion script ## Code After: import argparse import json import os import yaml from rst2pdf.dumpstyle import fixstyle from rst2pdf.rson import loads as rloads def main(): # set up the command, optional --save parameter, and a list of paths parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( '--save', action='store_true', help='Save .yaml version of the file (rather than output to stdout)', ) parser.add_argument( 'paths', metavar='PATH', nargs='+', help='An RSON-formatted file to convert.', ) args = parser.parse_args() # loop over the files for path in args.paths: # read rson from a file with open(path, 'rb') as fh: style_data = fixstyle(rloads(fh.read())) # output the style as json (already supported), then parse that json_style = json.dumps(style_data) reparsed_style = json.loads(json_style) yaml_style = yaml.dump(reparsed_style, default_flow_style=None) # output the yaml or save to a file if args.save: new_path = '.'.join((os.path.splitext(path)[0], 'yaml')) if os.path.exists(new_path): print("File " + new_path + " exists, cannot overwrite") else: print("Creating file " + new_path) with open(new_path, 'w') 
as file: file.write(yaml_style) else: print(yaml_style) if __name__ == '__main__': main()
// ... existing code ... import argparse import json import os import yaml // ... modified code ... def main(): # set up the command, optional --save parameter, and a list of paths parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter, ) parser.add_argument( '--save', action='store_true', help='Save .yaml version of the file (rather than output to stdout)', ) parser.add_argument( 'paths', ... help='An RSON-formatted file to convert.', ) args = parser.parse_args() # loop over the files for path in args.paths: # read rson from a file with open(path, 'rb') as fh: style_data = fixstyle(rloads(fh.read())) # output the style as json (already supported), then parse that json_style = json.dumps(style_data) reparsed_style = json.loads(json_style) yaml_style = yaml.dump(reparsed_style, default_flow_style=None) # output the yaml or save to a file if args.save: new_path = '.'.join((os.path.splitext(path)[0], 'yaml')) if os.path.exists(new_path): print("File " + new_path + " exists, cannot overwrite") else: print("Creating file " + new_path) with open(new_path, 'w') as file: file.write(yaml_style) else: print(yaml_style) if __name__ == '__main__': // ... rest of the code ...
8e202175767660bd90c4a894953d2553eec1a1d3
pythonx/completers/common/__init__.py
pythonx/completers/common/__init__.py
import completor import itertools import re from completor.compat import text_type from .filename import Filename # noqa from .buffer import Buffer # noqa from .omni import Omni # noqa try: from UltiSnips import UltiSnips_Manager # noqa from .ultisnips import Ultisnips # noqa except ImportError: pass word = re.compile(r'[^\W\d]\w*$', re.U) class Common(completor.Completor): filetype = 'common' sync = True def completions(self, completer, base): com = completor.get(completer) if not com: return [] com.ft = self.ft if com.disabled: return [] return com.parse(base) def parse(self, base): if not isinstance(base, text_type): return [] match = word.search(base) if not match: return [] base = match.group() if len(base) < self.get_option('min_chars'): return [] return list(itertools.chain( *[self.completions(n, base) for n in ('ultisnips', 'buffer')]))
import completor import itertools import re from completor.compat import text_type from .filename import Filename # noqa from .buffer import Buffer # noqa from .omni import Omni # noqa try: from UltiSnips import UltiSnips_Manager # noqa from .ultisnips import Ultisnips # noqa except ImportError: pass word = re.compile(r'[^\W\d]\w*$', re.U) class Common(completor.Completor): filetype = 'common' sync = True hooks = ['ultisnips', 'buffer'] def completions(self, completer, base): com = completor.get(completer) if not com: return [] com.ft = self.ft if com.disabled: return [] return com.parse(base) def parse(self, base): if not isinstance(base, text_type): return [] match = word.search(base) if not match: return [] base = match.group() if len(base) < self.get_option('min_chars'): return [] return list(itertools.chain( *[self.completions(n, base) for n in self.hooks]))
Make it possible to extend common completions
Make it possible to extend common completions
Python
mit
maralla/completor.vim,maralla/completor.vim
python
## Code Before: import completor import itertools import re from completor.compat import text_type from .filename import Filename # noqa from .buffer import Buffer # noqa from .omni import Omni # noqa try: from UltiSnips import UltiSnips_Manager # noqa from .ultisnips import Ultisnips # noqa except ImportError: pass word = re.compile(r'[^\W\d]\w*$', re.U) class Common(completor.Completor): filetype = 'common' sync = True def completions(self, completer, base): com = completor.get(completer) if not com: return [] com.ft = self.ft if com.disabled: return [] return com.parse(base) def parse(self, base): if not isinstance(base, text_type): return [] match = word.search(base) if not match: return [] base = match.group() if len(base) < self.get_option('min_chars'): return [] return list(itertools.chain( *[self.completions(n, base) for n in ('ultisnips', 'buffer')])) ## Instruction: Make it possible to extend common completions ## Code After: import completor import itertools import re from completor.compat import text_type from .filename import Filename # noqa from .buffer import Buffer # noqa from .omni import Omni # noqa try: from UltiSnips import UltiSnips_Manager # noqa from .ultisnips import Ultisnips # noqa except ImportError: pass word = re.compile(r'[^\W\d]\w*$', re.U) class Common(completor.Completor): filetype = 'common' sync = True hooks = ['ultisnips', 'buffer'] def completions(self, completer, base): com = completor.get(completer) if not com: return [] com.ft = self.ft if com.disabled: return [] return com.parse(base) def parse(self, base): if not isinstance(base, text_type): return [] match = word.search(base) if not match: return [] base = match.group() if len(base) < self.get_option('min_chars'): return [] return list(itertools.chain( *[self.completions(n, base) for n in self.hooks]))
// ... existing code ... filetype = 'common' sync = True hooks = ['ultisnips', 'buffer'] def completions(self, completer, base): com = completor.get(completer) if not com: // ... modified code ... return [] return list(itertools.chain( *[self.completions(n, base) for n in self.hooks])) // ... rest of the code ...
e0cb864f19f05f4ddfed0fa90c8b9895bde9b8df
caminae/core/management/__init__.py
caminae/core/management/__init__.py
import logging import traceback from south.signals import post_migrate logger = logging.getLogger(__name__) def run_initial_sql(sender, **kwargs): app_label = kwargs.get('app') import os from django.db import connection, transaction, models app_dir = os.path.normpath(os.path.join(os.path.dirname( models.get_app(app_label).__file__), 'sql')) backend_name = connection.settings_dict['ENGINE'].split('.')[-1] sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)), os.path.join(app_dir, "%s.sql" % app_label)] cursor = connection.cursor() for sql_file in sql_files: try: if os.path.exists(sql_file): logger.info("Loading initial SQL data from '%s'" % sql_file) f = open(sql_file) sql = f.read() f.close() cursor.execute(sql) except Exception, e: logger.error("Failed to install custom SQL file '%s': %s\n" % (sql_file, e)) traceback.print_exc() transaction.rollback_unless_managed() else: transaction.commit_unless_managed() post_migrate.connect(run_initial_sql)
import logging import traceback from south.signals import post_migrate logger = logging.getLogger(__name__) def run_initial_sql(sender, **kwargs): import os import re from django.db import connection, transaction, models app_label = kwargs.get('app') app_dir = os.path.normpath(os.path.join(os.path.dirname( models.get_app(app_label).__file__), 'sql')) if not os.path.exists(app_dir): return r = re.compile(r'^.*\.sql$') sql_files = [os.path.join(app_dir, f) for f in os.listdir(app_dir) if r.match(f) is not None] sql_files.sort() cursor = connection.cursor() for sql_file in sql_files: try: logger.info("Loading initial SQL data from '%s'" % sql_file) f = open(sql_file) sql = f.read() f.close() cursor.execute(sql) except Exception, e: logger.error("Failed to install custom SQL file '%s': %s\n" % (sql_file, e)) traceback.print_exc() transaction.rollback_unless_managed() else: transaction.commit_unless_managed() post_migrate.connect(run_initial_sql)
Enable loading of SQL scripts with arbitrary name
Enable loading of SQL scripts with arbitrary name
Python
bsd-2-clause
Anaethelion/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,mabhub/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek
python
## Code Before: import logging import traceback from south.signals import post_migrate logger = logging.getLogger(__name__) def run_initial_sql(sender, **kwargs): app_label = kwargs.get('app') import os from django.db import connection, transaction, models app_dir = os.path.normpath(os.path.join(os.path.dirname( models.get_app(app_label).__file__), 'sql')) backend_name = connection.settings_dict['ENGINE'].split('.')[-1] sql_files = [os.path.join(app_dir, "%s.%s.sql" % (app_label, backend_name)), os.path.join(app_dir, "%s.sql" % app_label)] cursor = connection.cursor() for sql_file in sql_files: try: if os.path.exists(sql_file): logger.info("Loading initial SQL data from '%s'" % sql_file) f = open(sql_file) sql = f.read() f.close() cursor.execute(sql) except Exception, e: logger.error("Failed to install custom SQL file '%s': %s\n" % (sql_file, e)) traceback.print_exc() transaction.rollback_unless_managed() else: transaction.commit_unless_managed() post_migrate.connect(run_initial_sql) ## Instruction: Enable loading of SQL scripts with arbitrary name ## Code After: import logging import traceback from south.signals import post_migrate logger = logging.getLogger(__name__) def run_initial_sql(sender, **kwargs): import os import re from django.db import connection, transaction, models app_label = kwargs.get('app') app_dir = os.path.normpath(os.path.join(os.path.dirname( models.get_app(app_label).__file__), 'sql')) if not os.path.exists(app_dir): return r = re.compile(r'^.*\.sql$') sql_files = [os.path.join(app_dir, f) for f in os.listdir(app_dir) if r.match(f) is not None] sql_files.sort() cursor = connection.cursor() for sql_file in sql_files: try: logger.info("Loading initial SQL data from '%s'" % sql_file) f = open(sql_file) sql = f.read() f.close() cursor.execute(sql) except Exception, e: logger.error("Failed to install custom SQL file '%s': %s\n" % (sql_file, e)) traceback.print_exc() transaction.rollback_unless_managed() else: transaction.commit_unless_managed() 
post_migrate.connect(run_initial_sql)
// ... existing code ... def run_initial_sql(sender, **kwargs): import os import re from django.db import connection, transaction, models app_label = kwargs.get('app') app_dir = os.path.normpath(os.path.join(os.path.dirname( models.get_app(app_label).__file__), 'sql')) if not os.path.exists(app_dir): return r = re.compile(r'^.*\.sql$') sql_files = [os.path.join(app_dir, f) for f in os.listdir(app_dir) if r.match(f) is not None] sql_files.sort() cursor = connection.cursor() for sql_file in sql_files: try: logger.info("Loading initial SQL data from '%s'" % sql_file) f = open(sql_file) sql = f.read() f.close() cursor.execute(sql) except Exception, e: logger.error("Failed to install custom SQL file '%s': %s\n" % (sql_file, e)) // ... rest of the code ...
73d0225b64ec82c7a8142dbac023be499b41fe0f
figures.py
figures.py
import sys import re import yaml FILE = sys.argv[1] YAML = sys.argv[2] TYPE = sys.argv[3] header = open(YAML, "r") text = open(FILE, "r") copy = open(FILE+"_NEW", "wt") docs = yaml.load_all(header) for doc in docs: if not doc == None: if 'figure' in doc.keys(): for line in text: mfig = False for f in doc['figure']: my_regex = r"^!\{" + re.escape(f['id']) + r"\}$" if re.search(my_regex, line, re.IGNORECASE): mfig = True print line if TYPE == 'preprint': ftype = "figure" fwidth = "\\columnwidth" if "wide" in f.keys(): ftype = "figure*" fwidth = "\\textwidth" copy.write("\n\\begin{" + ftype + "}[bt]\n") copy.write("\t\\centering\n") print f copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n") copy.write("\t\\caption{" + f['caption'] + "}\n") copy.write("\t\\label{" + f['id'] + "}\n") copy.write("\\end{" + ftype + "}\n\n") if not mfig: copy.write(line) header.close() text.close() copy.close()
import sys import re import yaml FILE = sys.argv[1] YAML = sys.argv[2] TYPE = sys.argv[3] header = open(YAML, "r") text = open(FILE, "r") copy = open(FILE+"_NEW", "wt") docs = yaml.load_all(header) for doc in docs: if not doc == None: if 'figure' in doc.keys(): for line in text: mfig = False for f in doc['figure']: my_regex = r"^!\{" + re.escape(f['id']) + r"\}$" if re.search(my_regex, line, re.IGNORECASE): mfig = True if TYPE == 'preprint': ftype = "figure" fwidth = "\\columnwidth" if "wide" in f.keys(): ftype = "figure*" fwidth = "\\textwidth" copy.write("\n\\begin{" + ftype + "}[bt]\n") copy.write("\t\\centering\n") copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n") copy.write("\t\\caption{" + f['caption'] + "}\n") copy.write("\t\\label{" + f['id'] + "}\n") copy.write("\\end{" + ftype + "}\n\n") if not mfig: copy.write(line) header.close() text.close() copy.close()
Make the python script silent
Make the python script silent
Python
mit
PoisotLab/PLMT
python
## Code Before: import sys import re import yaml FILE = sys.argv[1] YAML = sys.argv[2] TYPE = sys.argv[3] header = open(YAML, "r") text = open(FILE, "r") copy = open(FILE+"_NEW", "wt") docs = yaml.load_all(header) for doc in docs: if not doc == None: if 'figure' in doc.keys(): for line in text: mfig = False for f in doc['figure']: my_regex = r"^!\{" + re.escape(f['id']) + r"\}$" if re.search(my_regex, line, re.IGNORECASE): mfig = True print line if TYPE == 'preprint': ftype = "figure" fwidth = "\\columnwidth" if "wide" in f.keys(): ftype = "figure*" fwidth = "\\textwidth" copy.write("\n\\begin{" + ftype + "}[bt]\n") copy.write("\t\\centering\n") print f copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n") copy.write("\t\\caption{" + f['caption'] + "}\n") copy.write("\t\\label{" + f['id'] + "}\n") copy.write("\\end{" + ftype + "}\n\n") if not mfig: copy.write(line) header.close() text.close() copy.close() ## Instruction: Make the python script silent ## Code After: import sys import re import yaml FILE = sys.argv[1] YAML = sys.argv[2] TYPE = sys.argv[3] header = open(YAML, "r") text = open(FILE, "r") copy = open(FILE+"_NEW", "wt") docs = yaml.load_all(header) for doc in docs: if not doc == None: if 'figure' in doc.keys(): for line in text: mfig = False for f in doc['figure']: my_regex = r"^!\{" + re.escape(f['id']) + r"\}$" if re.search(my_regex, line, re.IGNORECASE): mfig = True if TYPE == 'preprint': ftype = "figure" fwidth = "\\columnwidth" if "wide" in f.keys(): ftype = "figure*" fwidth = "\\textwidth" copy.write("\n\\begin{" + ftype + "}[bt]\n") copy.write("\t\\centering\n") copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n") copy.write("\t\\caption{" + f['caption'] + "}\n") copy.write("\t\\label{" + f['id'] + "}\n") copy.write("\\end{" + ftype + "}\n\n") if not mfig: copy.write(line) header.close() text.close() copy.close()
# ... existing code ... my_regex = r"^!\{" + re.escape(f['id']) + r"\}$" if re.search(my_regex, line, re.IGNORECASE): mfig = True if TYPE == 'preprint': ftype = "figure" fwidth = "\\columnwidth" # ... modified code ... fwidth = "\\textwidth" copy.write("\n\\begin{" + ftype + "}[bt]\n") copy.write("\t\\centering\n") copy.write("\t\\includegraphics[width=" + fwidth + "]{" + f['file'] + "}\n") copy.write("\t\\caption{" + f['caption'] + "}\n") copy.write("\t\\label{" + f['id'] + "}\n") # ... rest of the code ...
2615effe9d37c75792dd0721f6d583c3c9a586f6
app/src/main/java/me/devsaki/hentoid/database/domains/DuplicateEntry.kt
app/src/main/java/me/devsaki/hentoid/database/domains/DuplicateEntry.kt
package me.devsaki.hentoid.database.domains import io.objectbox.annotation.Entity import io.objectbox.annotation.Id import me.devsaki.hentoid.util.Helper @Entity data class DuplicateEntry( val referenceId: Long, val referenceSize: Long, var duplicateId: Long = -1, val titleScore: Float = 0f, val coverScore: Float = 0f, val artistScore: Float = 0f, @Id var id: Long = 0) { // ID is mandatory for ObjectBox to work @Transient private var totalScore = -1f @Transient var nbDuplicates = 1 @Transient var referenceContent: Content? = null @Transient var duplicateContent: Content? = null @Transient var keep: Boolean? = null fun calcTotalScore(): Float { if (totalScore > -1) return totalScore // Calculate val operands = ArrayList<android.util.Pair<Float, Float>>() if (titleScore > -1) operands.add(android.util.Pair<Float, Float>(titleScore, 1f)) if (coverScore > -1) operands.add(android.util.Pair<Float, Float>(coverScore, 1f)) if (artistScore > -1) operands.add(android.util.Pair<Float, Float>(artistScore, 0.5f)) return Helper.weigthedAverage(operands) } fun hash64(): Long { return Helper.hash64(("$referenceId.$duplicateId").toByteArray()) } }
package me.devsaki.hentoid.database.domains import io.objectbox.annotation.Entity import io.objectbox.annotation.Id import me.devsaki.hentoid.util.Helper @Entity data class DuplicateEntry( val referenceId: Long, val referenceSize: Long, var duplicateId: Long = -1, val titleScore: Float = 0f, val coverScore: Float = 0f, val artistScore: Float = 0f, @Id var id: Long = 0 ) { // ID is mandatory for ObjectBox to work @Transient private var totalScore = -1f @Transient var nbDuplicates = 1 @Transient var referenceContent: Content? = null @Transient var duplicateContent: Content? = null @Transient var keep: Boolean? = null fun calcTotalScore(): Float { // Try to fetch pre-calculated score, if present if (totalScore > -1) return totalScore // Calculate val operands = ArrayList<android.util.Pair<Float, Float>>() if (titleScore > -1) operands.add(android.util.Pair<Float, Float>(titleScore, 1f)) if (coverScore > -1) operands.add(android.util.Pair<Float, Float>(coverScore, 1f)) return Helper.weigthedAverage(operands) * (if (artistScore > -1) artistScore else 1f) } fun hash64(): Long { return Helper.hash64(("$referenceId.$duplicateId").toByteArray()) } }
Change role of artist score in the global scoring
Change role of artist score in the global scoring
Kotlin
apache-2.0
AVnetWS/Hentoid,AVnetWS/Hentoid,AVnetWS/Hentoid
kotlin
## Code Before: package me.devsaki.hentoid.database.domains import io.objectbox.annotation.Entity import io.objectbox.annotation.Id import me.devsaki.hentoid.util.Helper @Entity data class DuplicateEntry( val referenceId: Long, val referenceSize: Long, var duplicateId: Long = -1, val titleScore: Float = 0f, val coverScore: Float = 0f, val artistScore: Float = 0f, @Id var id: Long = 0) { // ID is mandatory for ObjectBox to work @Transient private var totalScore = -1f @Transient var nbDuplicates = 1 @Transient var referenceContent: Content? = null @Transient var duplicateContent: Content? = null @Transient var keep: Boolean? = null fun calcTotalScore(): Float { if (totalScore > -1) return totalScore // Calculate val operands = ArrayList<android.util.Pair<Float, Float>>() if (titleScore > -1) operands.add(android.util.Pair<Float, Float>(titleScore, 1f)) if (coverScore > -1) operands.add(android.util.Pair<Float, Float>(coverScore, 1f)) if (artistScore > -1) operands.add(android.util.Pair<Float, Float>(artistScore, 0.5f)) return Helper.weigthedAverage(operands) } fun hash64(): Long { return Helper.hash64(("$referenceId.$duplicateId").toByteArray()) } } ## Instruction: Change role of artist score in the global scoring ## Code After: package me.devsaki.hentoid.database.domains import io.objectbox.annotation.Entity import io.objectbox.annotation.Id import me.devsaki.hentoid.util.Helper @Entity data class DuplicateEntry( val referenceId: Long, val referenceSize: Long, var duplicateId: Long = -1, val titleScore: Float = 0f, val coverScore: Float = 0f, val artistScore: Float = 0f, @Id var id: Long = 0 ) { // ID is mandatory for ObjectBox to work @Transient private var totalScore = -1f @Transient var nbDuplicates = 1 @Transient var referenceContent: Content? = null @Transient var duplicateContent: Content? = null @Transient var keep: Boolean? 
= null fun calcTotalScore(): Float { // Try to fetch pre-calculated score, if present if (totalScore > -1) return totalScore // Calculate val operands = ArrayList<android.util.Pair<Float, Float>>() if (titleScore > -1) operands.add(android.util.Pair<Float, Float>(titleScore, 1f)) if (coverScore > -1) operands.add(android.util.Pair<Float, Float>(coverScore, 1f)) return Helper.weigthedAverage(operands) * (if (artistScore > -1) artistScore else 1f) } fun hash64(): Long { return Helper.hash64(("$referenceId.$duplicateId").toByteArray()) } }
... @Entity data class DuplicateEntry( val referenceId: Long, val referenceSize: Long, var duplicateId: Long = -1, val titleScore: Float = 0f, val coverScore: Float = 0f, val artistScore: Float = 0f, @Id var id: Long = 0 ) { // ID is mandatory for ObjectBox to work @Transient private var totalScore = -1f ... fun calcTotalScore(): Float { // Try to fetch pre-calculated score, if present if (totalScore > -1) return totalScore // Calculate val operands = ArrayList<android.util.Pair<Float, Float>>() if (titleScore > -1) operands.add(android.util.Pair<Float, Float>(titleScore, 1f)) if (coverScore > -1) operands.add(android.util.Pair<Float, Float>(coverScore, 1f)) return Helper.weigthedAverage(operands) * (if (artistScore > -1) artistScore else 1f) } fun hash64(): Long { ...
af6c260bb27f6b1c5f56ffbd0616b30b9afdbd7b
tests/user_utils_test.py
tests/user_utils_test.py
"""Tests for user utility functions.""" from drudge import Vec, sum_, prod_ from drudge.term import parse_terms def test_sum_prod_utility(): """Test the summation and product utility.""" v = Vec('v') vecs = [v[i] for i in range(3)] v0, v1, v2 = vecs # The proxy object cannot be directly compared. assert parse_terms(sum_(vecs)) == parse_terms(v0 + v1 + v2) assert parse_terms(prod_(vecs)) == parse_terms(v0 * v1 * v2) assert sum_([]) == 0 assert prod_([]) == 1
"""Tests for user utility functions.""" import time import types from unittest.mock import MagicMock from drudge import Vec, sum_, prod_, TimeStamper from drudge.term import parse_terms def test_sum_prod_utility(): """Test the summation and product utility.""" v = Vec('v') vecs = [v[i] for i in range(3)] v0, v1, v2 = vecs # The proxy object cannot be directly compared. assert parse_terms(sum_(vecs)) == parse_terms(v0 + v1 + v2) assert parse_terms(prod_(vecs)) == parse_terms(v0 * v1 * v2) assert sum_([]) == 0 assert prod_([]) == 1 def test_time_stamper(): """Test the time stamper utility.""" tensor = types.SimpleNamespace(n_terms=2, cache=MagicMock()) stamper = TimeStamper() time.sleep(0.5) res = stamper.stamp('Nothing') assert res.startswith('Nothing done') assert float(res.split()[-2]) - 0.5 < 0.1 time.sleep(0.5) res = stamper.stamp('Tensor', tensor) assert res.startswith('Tensor done, 2 terms') assert float(res.split()[-2]) - 0.5 < 0.1 tensor.cache.assert_called_once_with()
Add tests for the time stamping facility
Add tests for the time stamping facility
Python
mit
tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge
python
## Code Before: """Tests for user utility functions.""" from drudge import Vec, sum_, prod_ from drudge.term import parse_terms def test_sum_prod_utility(): """Test the summation and product utility.""" v = Vec('v') vecs = [v[i] for i in range(3)] v0, v1, v2 = vecs # The proxy object cannot be directly compared. assert parse_terms(sum_(vecs)) == parse_terms(v0 + v1 + v2) assert parse_terms(prod_(vecs)) == parse_terms(v0 * v1 * v2) assert sum_([]) == 0 assert prod_([]) == 1 ## Instruction: Add tests for the time stamping facility ## Code After: """Tests for user utility functions.""" import time import types from unittest.mock import MagicMock from drudge import Vec, sum_, prod_, TimeStamper from drudge.term import parse_terms def test_sum_prod_utility(): """Test the summation and product utility.""" v = Vec('v') vecs = [v[i] for i in range(3)] v0, v1, v2 = vecs # The proxy object cannot be directly compared. assert parse_terms(sum_(vecs)) == parse_terms(v0 + v1 + v2) assert parse_terms(prod_(vecs)) == parse_terms(v0 * v1 * v2) assert sum_([]) == 0 assert prod_([]) == 1 def test_time_stamper(): """Test the time stamper utility.""" tensor = types.SimpleNamespace(n_terms=2, cache=MagicMock()) stamper = TimeStamper() time.sleep(0.5) res = stamper.stamp('Nothing') assert res.startswith('Nothing done') assert float(res.split()[-2]) - 0.5 < 0.1 time.sleep(0.5) res = stamper.stamp('Tensor', tensor) assert res.startswith('Tensor done, 2 terms') assert float(res.split()[-2]) - 0.5 < 0.1 tensor.cache.assert_called_once_with()
// ... existing code ... """Tests for user utility functions.""" import time import types from unittest.mock import MagicMock from drudge import Vec, sum_, prod_, TimeStamper from drudge.term import parse_terms // ... modified code ... assert sum_([]) == 0 assert prod_([]) == 1 def test_time_stamper(): """Test the time stamper utility.""" tensor = types.SimpleNamespace(n_terms=2, cache=MagicMock()) stamper = TimeStamper() time.sleep(0.5) res = stamper.stamp('Nothing') assert res.startswith('Nothing done') assert float(res.split()[-2]) - 0.5 < 0.1 time.sleep(0.5) res = stamper.stamp('Tensor', tensor) assert res.startswith('Tensor done, 2 terms') assert float(res.split()[-2]) - 0.5 < 0.1 tensor.cache.assert_called_once_with() // ... rest of the code ...
071926edc64241b0359c9a0148fc0825a09cb6ba
marionette/__init__.py
marionette/__init__.py
from cgi import parse_header import json from django.http import HttpResponse, Http404 RPC_MARKER = '_rpc' class Resource(object): def __init__(self, request, *args, **kwargs): self.request = request self.args = args self.kwargs = kwargs @classmethod def as_view(cls): def view(request, *args, **kwargs): self = cls(request, *args, **kwargs) return self.dispatch(request) return view def dispatch(self, request): method = request.META['HTTP_X_RPC_ACTION'] func = getattr(self, method, None) if not getattr(func, RPC_MARKER, True): raise Http404 data = self.get_request_data(request) resp = func(data) return HttpResponse(json.dumps(resp), content_type='application/json') def get_request_data(self, default=None): '''Retrieve data from request''' c_type, _ = parse_header(self.request.META.get('CONTENT_TYPE', '')) if c_type in ['application/json', 'text/json']: if not self.request.body: return default return self.loads(self.request.body) if self.request.method == 'GET': return self.request.GET return self.request.POST def rpc(view): '''Mark a view as accessible via RPC''' setattr(view, '_rpc', True) return view
from cgi import parse_header import json from django.http import HttpResponse, Http404 RPC_MARKER = '_rpc' class Resource(object): def __init__(self, request, *args, **kwargs): self.request = request self.args = args self.kwargs = kwargs @classmethod def as_view(cls): def view(request, *args, **kwargs): self = cls(request, *args, **kwargs) return self.dispatch(request) return view def dispatch(self, request): method = request.META['HTTP_X_RPC_ACTION'] func = getattr(self, method, None) if not getattr(func, RPC_MARKER, True): raise Http404 data = self.get_request_data(request) resp = self.execute(func, data) return HttpResponse(json.dumps(resp), content_type='application/json') def execute(self, handler, data): '''Helpful hook to ease wrapping the handler''' return handler(**data) def get_request_data(self, default=None): '''Retrieve data from request''' c_type, _ = parse_header(self.request.META.get('CONTENT_TYPE', '')) if c_type in ['application/json', 'text/json']: if not self.request.body: return default return self.loads(self.request.body) if self.request.method == 'GET': return self.request.GET return self.request.POST def rpc(view): '''Mark a view as accessible via RPC''' setattr(view, '_rpc', True) return view
Add execute hook to allow wrapping handler calls
Add execute hook to allow wrapping handler calls
Python
mit
funkybob/django-marionette
python
## Code Before: from cgi import parse_header import json from django.http import HttpResponse, Http404 RPC_MARKER = '_rpc' class Resource(object): def __init__(self, request, *args, **kwargs): self.request = request self.args = args self.kwargs = kwargs @classmethod def as_view(cls): def view(request, *args, **kwargs): self = cls(request, *args, **kwargs) return self.dispatch(request) return view def dispatch(self, request): method = request.META['HTTP_X_RPC_ACTION'] func = getattr(self, method, None) if not getattr(func, RPC_MARKER, True): raise Http404 data = self.get_request_data(request) resp = func(data) return HttpResponse(json.dumps(resp), content_type='application/json') def get_request_data(self, default=None): '''Retrieve data from request''' c_type, _ = parse_header(self.request.META.get('CONTENT_TYPE', '')) if c_type in ['application/json', 'text/json']: if not self.request.body: return default return self.loads(self.request.body) if self.request.method == 'GET': return self.request.GET return self.request.POST def rpc(view): '''Mark a view as accessible via RPC''' setattr(view, '_rpc', True) return view ## Instruction: Add execute hook to allow wrapping handler calls ## Code After: from cgi import parse_header import json from django.http import HttpResponse, Http404 RPC_MARKER = '_rpc' class Resource(object): def __init__(self, request, *args, **kwargs): self.request = request self.args = args self.kwargs = kwargs @classmethod def as_view(cls): def view(request, *args, **kwargs): self = cls(request, *args, **kwargs) return self.dispatch(request) return view def dispatch(self, request): method = request.META['HTTP_X_RPC_ACTION'] func = getattr(self, method, None) if not getattr(func, RPC_MARKER, True): raise Http404 data = self.get_request_data(request) resp = self.execute(func, data) return HttpResponse(json.dumps(resp), content_type='application/json') def execute(self, handler, data): '''Helpful hook to ease wrapping the handler''' return 
handler(**data) def get_request_data(self, default=None): '''Retrieve data from request''' c_type, _ = parse_header(self.request.META.get('CONTENT_TYPE', '')) if c_type in ['application/json', 'text/json']: if not self.request.body: return default return self.loads(self.request.body) if self.request.method == 'GET': return self.request.GET return self.request.POST def rpc(view): '''Mark a view as accessible via RPC''' setattr(view, '_rpc', True) return view
# ... existing code ... data = self.get_request_data(request) resp = self.execute(func, data) return HttpResponse(json.dumps(resp), content_type='application/json') def execute(self, handler, data): '''Helpful hook to ease wrapping the handler''' return handler(**data) def get_request_data(self, default=None): '''Retrieve data from request''' # ... rest of the code ...
96fe288cbd4c4399c83b4c3d56da6e427aaad0f9
spicedham/digitdestroyer.py
spicedham/digitdestroyer.py
from spicedham.basewrapper import BaseWrapper class DigitDestroyer(BaseWrapper): def train(*args): pass def classify(self, response): if all(map(unicode.isdigit, response)): return 1 else: return 0.5
from spicedham.basewrapper import BaseWrapper class DigitDestroyer(object): def train(*args): pass def classify(self, response): if all(map(unicode.isdigit, response)): return 1 else: return None
Fix inheritence error and return value
Fix inheritence error and return value It shouldn't inherit from BaseWrapper, but merely object. It should return None instead of 0.5 so it will have no effect on the average.
Python
mpl-2.0
mozilla/spicedham,mozilla/spicedham
python
## Code Before: from spicedham.basewrapper import BaseWrapper class DigitDestroyer(BaseWrapper): def train(*args): pass def classify(self, response): if all(map(unicode.isdigit, response)): return 1 else: return 0.5 ## Instruction: Fix inheritence error and return value It shouldn't inherit from BaseWrapper, but merely object. It should return None instead of 0.5 so it will have no effect on the average. ## Code After: from spicedham.basewrapper import BaseWrapper class DigitDestroyer(object): def train(*args): pass def classify(self, response): if all(map(unicode.isdigit, response)): return 1 else: return None
# ... existing code ... from spicedham.basewrapper import BaseWrapper class DigitDestroyer(object): def train(*args): pass def classify(self, response): # ... modified code ... if all(map(unicode.isdigit, response)): return 1 else: return None # ... rest of the code ...
03dabdb0fd6af5ef5784d795d3953ca4e7175ce5
android/src/com/gdxjam/base/android/AndroidLauncher.java
android/src/com/gdxjam/base/android/AndroidLauncher.java
package com.gdxjam.base.android; import android.os.Bundle; import com.badlogic.gdx.backends.android.AndroidApplication; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.gdxjam.Main; public class AndroidLauncher extends AndroidApplication { @Override protected void onCreate (Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration config = new AndroidApplicationConfiguration(); config.useImmersiveMode = true; initialize(new Main(), config); } }
package com.gdxjam.base.android; import android.os.Bundle; import com.badlogic.gdx.backends.android.AndroidApplication; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.gdxjam.Main; public class AndroidLauncher extends AndroidApplication { @Override protected void onCreate (Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration config = new AndroidApplicationConfiguration(); initialize(new Main(), config); } }
Revert "Android build now uses immersive mode."
Revert "Android build now uses immersive mode." This reverts commit fee8b18cf00d8d566e35ae2d97d413d1f15c36af.
Java
mit
lanen/GDXJam,Twiebs/GDXJam,Twiebs/GDXJam,lanen/GDXJam,libgdx-jam/GDXJam,libgdx-jam/GDXJam,Twiebs/GDXJam,lanen/GDXJam,libgdx-jam/GDXJam
java
## Code Before: package com.gdxjam.base.android; import android.os.Bundle; import com.badlogic.gdx.backends.android.AndroidApplication; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.gdxjam.Main; public class AndroidLauncher extends AndroidApplication { @Override protected void onCreate (Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration config = new AndroidApplicationConfiguration(); config.useImmersiveMode = true; initialize(new Main(), config); } } ## Instruction: Revert "Android build now uses immersive mode." This reverts commit fee8b18cf00d8d566e35ae2d97d413d1f15c36af. ## Code After: package com.gdxjam.base.android; import android.os.Bundle; import com.badlogic.gdx.backends.android.AndroidApplication; import com.badlogic.gdx.backends.android.AndroidApplicationConfiguration; import com.gdxjam.Main; public class AndroidLauncher extends AndroidApplication { @Override protected void onCreate (Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration config = new AndroidApplicationConfiguration(); initialize(new Main(), config); } }
... protected void onCreate (Bundle savedInstanceState) { super.onCreate(savedInstanceState); AndroidApplicationConfiguration config = new AndroidApplicationConfiguration(); initialize(new Main(), config); } } ...
a4375a6ec5ca54b887527885235317986011801c
guesser.py
guesser.py
from synt.utils.redis_manager import RedisManager from synt.utils.extractors import best_word_feats from synt.utils.text import sanitize_text MANAGER = RedisManager() DEFAULT_CLASSIFIER = MANAGER.load_classifier() def guess(text, classifier=DEFAULT_CLASSIFIER): """Takes a blob of text and returns the sentiment and confidence score.""" assert classifier, "Needs a classifier." bag_of_words = best_word_feats(sanitize_text(text)) if bag_of_words: guess = classifier.classify(bag_of_words) prob = classifier.prob_classify(bag_of_words) return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
from synt.utils.redis_manager import RedisManager from synt.utils.extractors import best_word_feats from synt.utils.text import sanitize_text MANAGER = RedisManager() DEFAULT_CLASSIFIER = MANAGER.load_classifier() def guess(text, classifier=DEFAULT_CLASSIFIER): """Takes a blob of text and returns the sentiment and confidence score.""" assert classifier, "Needs a classifier." bag_of_words = best_word_feats(sanitize_text(text)) if bag_of_words: guess = classifier.classify(bag_of_words) prob = classifier.prob_classify(bag_of_words) #return a -1 .. 1 score score = prob.prob('positive') - prob.prob('negative') return score
Return a -1 .. 1 sentiment score.
Return a -1 .. 1 sentiment score.
Python
agpl-3.0
lrvick/synt
python
## Code Before: from synt.utils.redis_manager import RedisManager from synt.utils.extractors import best_word_feats from synt.utils.text import sanitize_text MANAGER = RedisManager() DEFAULT_CLASSIFIER = MANAGER.load_classifier() def guess(text, classifier=DEFAULT_CLASSIFIER): """Takes a blob of text and returns the sentiment and confidence score.""" assert classifier, "Needs a classifier." bag_of_words = best_word_feats(sanitize_text(text)) if bag_of_words: guess = classifier.classify(bag_of_words) prob = classifier.prob_classify(bag_of_words) return (guess, [(prob.prob(sample),sample) for sample in prob.samples()]) ## Instruction: Return a -1 .. 1 sentiment score. ## Code After: from synt.utils.redis_manager import RedisManager from synt.utils.extractors import best_word_feats from synt.utils.text import sanitize_text MANAGER = RedisManager() DEFAULT_CLASSIFIER = MANAGER.load_classifier() def guess(text, classifier=DEFAULT_CLASSIFIER): """Takes a blob of text and returns the sentiment and confidence score.""" assert classifier, "Needs a classifier." bag_of_words = best_word_feats(sanitize_text(text)) if bag_of_words: guess = classifier.classify(bag_of_words) prob = classifier.prob_classify(bag_of_words) #return a -1 .. 1 score score = prob.prob('positive') - prob.prob('negative') return score
// ... existing code ... if bag_of_words: guess = classifier.classify(bag_of_words) prob = classifier.prob_classify(bag_of_words) #return a -1 .. 1 score score = prob.prob('positive') - prob.prob('negative') return score // ... rest of the code ...
498552599753f07d179025b5de1e8207ec2b94cd
test/unit/util/test_multipart_stream.py
test/unit/util/test_multipart_stream.py
from __future__ import unicode_literals, absolute_import import pytest from boxsdk.util.multipart_stream import MultipartStream @pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'})) def multipart_stream_data(request): return request.param @pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'})) def multipart_stream_files(request): return request.param def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files): if not multipart_stream_data and not multipart_stream_files: pytest.xfail('Encoder does not support empty fields.') stream = MultipartStream(multipart_stream_data, multipart_stream_files) encoded_stream = stream.to_string() data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()] file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()] assert -1 not in data_indices assert -1 not in file_indices assert all((all((data_index < f for f in file_indices)) for data_index in data_indices))
from __future__ import unicode_literals, absolute_import import pytest from boxsdk.util.multipart_stream import MultipartStream @pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'})) def multipart_stream_data(request): return request.param @pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'})) def multipart_stream_files(request): return request.param def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files): # pylint:disable=redefined-outer-name if not multipart_stream_data and not multipart_stream_files: pytest.xfail('Encoder does not support empty fields.') stream = MultipartStream(multipart_stream_data, multipart_stream_files) encoded_stream = stream.to_string() data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()] file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()] assert -1 not in data_indices assert -1 not in file_indices assert all((all((data_index < f for f in file_indices)) for data_index in data_indices))
Disable redefined outer name pylint warning.
Disable redefined outer name pylint warning.
Python
apache-2.0
Frencil/box-python-sdk,Frencil/box-python-sdk,box/box-python-sdk
python
## Code Before: from __future__ import unicode_literals, absolute_import import pytest from boxsdk.util.multipart_stream import MultipartStream @pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'})) def multipart_stream_data(request): return request.param @pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'})) def multipart_stream_files(request): return request.param def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files): if not multipart_stream_data and not multipart_stream_files: pytest.xfail('Encoder does not support empty fields.') stream = MultipartStream(multipart_stream_data, multipart_stream_files) encoded_stream = stream.to_string() data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()] file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()] assert -1 not in data_indices assert -1 not in file_indices assert all((all((data_index < f for f in file_indices)) for data_index in data_indices)) ## Instruction: Disable redefined outer name pylint warning. 
## Code After: from __future__ import unicode_literals, absolute_import import pytest from boxsdk.util.multipart_stream import MultipartStream @pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'})) def multipart_stream_data(request): return request.param @pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'})) def multipart_stream_files(request): return request.param def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files): # pylint:disable=redefined-outer-name if not multipart_stream_data and not multipart_stream_files: pytest.xfail('Encoder does not support empty fields.') stream = MultipartStream(multipart_stream_data, multipart_stream_files) encoded_stream = stream.to_string() data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()] file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()] assert -1 not in data_indices assert -1 not in file_indices assert all((all((data_index < f for f in file_indices)) for data_index in data_indices))
... def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files): # pylint:disable=redefined-outer-name if not multipart_stream_data and not multipart_stream_files: pytest.xfail('Encoder does not support empty fields.') stream = MultipartStream(multipart_stream_data, multipart_stream_files) ...
d5ddfb8af861f02074fe113f87a6ea6b4f1bc5db
tests/child-process-sigterm-trap.py
tests/child-process-sigterm-trap.py
from common import * import sys, signal # Be naughty and ignore SIGTERM to simulate hanging child signal.signal(signal.SIGTERM, signal.SIG_IGN) # Start a server that listens for incoming connections try: print_ok("child starting up on port %s" % sys.argv[1]) s = TcpServer(int(sys.argv[1])) s.listen() while True: try: s.socket, _ = s.listener.accept() s.socket.settimeout(TIMEOUT) except: pass finally: s.cleanup() print_ok("child exiting")
from common import * import sys, signal # Be naughty and ignore SIGTERM to simulate hanging child signal.signal(signal.SIGTERM, signal.SIG_IGN) # Start a server that listens for incoming connections try: print_ok("child starting up on port %s" % sys.argv[1]) s = TcpServer(int(sys.argv[1])) s.listen() while True: try: s.socket, _ = s.listener.accept() s.socket.settimeout(TIMEOUT) except: pass finally: s.cleanup() print_ok("child exiting")
Fix formatting in child sample to match other files
Fix formatting in child sample to match other files
Python
apache-2.0
square/ghostunnel,square/ghostunnel
python
## Code Before: from common import * import sys, signal # Be naughty and ignore SIGTERM to simulate hanging child signal.signal(signal.SIGTERM, signal.SIG_IGN) # Start a server that listens for incoming connections try: print_ok("child starting up on port %s" % sys.argv[1]) s = TcpServer(int(sys.argv[1])) s.listen() while True: try: s.socket, _ = s.listener.accept() s.socket.settimeout(TIMEOUT) except: pass finally: s.cleanup() print_ok("child exiting") ## Instruction: Fix formatting in child sample to match other files ## Code After: from common import * import sys, signal # Be naughty and ignore SIGTERM to simulate hanging child signal.signal(signal.SIGTERM, signal.SIG_IGN) # Start a server that listens for incoming connections try: print_ok("child starting up on port %s" % sys.argv[1]) s = TcpServer(int(sys.argv[1])) s.listen() while True: try: s.socket, _ = s.listener.accept() s.socket.settimeout(TIMEOUT) except: pass finally: s.cleanup() print_ok("child exiting")
... # Start a server that listens for incoming connections try: print_ok("child starting up on port %s" % sys.argv[1]) s = TcpServer(int(sys.argv[1])) s.listen() while True: try: s.socket, _ = s.listener.accept() s.socket.settimeout(TIMEOUT) except: pass finally: s.cleanup() print_ok("child exiting") ...
d53152aedff7777be771124a91ba325f75398739
test.py
test.py
from theora import Ogg from numpy import concatenate, zeros_like from scipy.misc import toimage f = open("video.ogv") o = Ogg(f) Y, Cb, Cr = o.test() Cb2 = zeros_like(Y) for i in range(Cb2.shape[0]): for j in range(Cb2.shape[1]): Cb2[i, j] = Cb[i/2, j/2] Cr2 = zeros_like(Y) for i in range(Cr2.shape[0]): for j in range(Cr2.shape[1]): Cr2[i, j] = Cr[i/2, j/2] w, h = Y.shape Y = Y.reshape((1, w, h)) Cb = Cb2.reshape((1, w, h)) Cr = Cr2.reshape((1, w, h)) A = concatenate((Y, Cb, Cr)) img = toimage(A, mode="YCbCr", channel_axis=0) print img from pylab import imshow, show from matplotlib import cm imshow(img, origin="lower") show()
from theora import Ogg from numpy import concatenate, zeros_like from scipy.misc import toimage f = open("video.ogv") o = Ogg(f) Y, Cb, Cr = o.test() Cb2 = zeros_like(Y) for i in range(Cb2.shape[0]): for j in range(Cb2.shape[1]): Cb2[i, j] = Cb[i/2, j/2] Cr2 = zeros_like(Y) for i in range(Cr2.shape[0]): for j in range(Cr2.shape[1]): Cr2[i, j] = Cr[i/2, j/2] w, h = Y.shape Y = Y.reshape((1, w, h)) Cb = Cb2.reshape((1, w, h)) Cr = Cr2.reshape((1, w, h)) A = concatenate((Y, Cb, Cr)) img = toimage(A, mode="YCbCr", channel_axis=0) img.convert("RGB").save("frame.png") from pylab import imshow, show from matplotlib import cm imshow(img, origin="lower") show()
Save the image to png
Save the image to png
Python
bsd-3-clause
certik/python-theora,certik/python-theora
python
## Code Before: from theora import Ogg from numpy import concatenate, zeros_like from scipy.misc import toimage f = open("video.ogv") o = Ogg(f) Y, Cb, Cr = o.test() Cb2 = zeros_like(Y) for i in range(Cb2.shape[0]): for j in range(Cb2.shape[1]): Cb2[i, j] = Cb[i/2, j/2] Cr2 = zeros_like(Y) for i in range(Cr2.shape[0]): for j in range(Cr2.shape[1]): Cr2[i, j] = Cr[i/2, j/2] w, h = Y.shape Y = Y.reshape((1, w, h)) Cb = Cb2.reshape((1, w, h)) Cr = Cr2.reshape((1, w, h)) A = concatenate((Y, Cb, Cr)) img = toimage(A, mode="YCbCr", channel_axis=0) print img from pylab import imshow, show from matplotlib import cm imshow(img, origin="lower") show() ## Instruction: Save the image to png ## Code After: from theora import Ogg from numpy import concatenate, zeros_like from scipy.misc import toimage f = open("video.ogv") o = Ogg(f) Y, Cb, Cr = o.test() Cb2 = zeros_like(Y) for i in range(Cb2.shape[0]): for j in range(Cb2.shape[1]): Cb2[i, j] = Cb[i/2, j/2] Cr2 = zeros_like(Y) for i in range(Cr2.shape[0]): for j in range(Cr2.shape[1]): Cr2[i, j] = Cr[i/2, j/2] w, h = Y.shape Y = Y.reshape((1, w, h)) Cb = Cb2.reshape((1, w, h)) Cr = Cr2.reshape((1, w, h)) A = concatenate((Y, Cb, Cr)) img = toimage(A, mode="YCbCr", channel_axis=0) img.convert("RGB").save("frame.png") from pylab import imshow, show from matplotlib import cm imshow(img, origin="lower") show()
// ... existing code ... Cr = Cr2.reshape((1, w, h)) A = concatenate((Y, Cb, Cr)) img = toimage(A, mode="YCbCr", channel_axis=0) img.convert("RGB").save("frame.png") from pylab import imshow, show from matplotlib import cm // ... rest of the code ...
6c322211e6d46e37dd3f558ce15ad5b84062bd22
deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/CSVDataSetIteratorTest.java
deeplearning4j-core/src/test/java/org/deeplearning4j/datasets/iterator/CSVDataSetIteratorTest.java
package org.deeplearning4j.datasets.iterator; import org.nd4j.linalg.dataset.DataSet; import org.junit.Test; import org.springframework.core.io.ClassPathResource; /** * CSV Test * @author Adam Gibson */ public class CSVDataSetIteratorTest { @Test public void testCSV() throws Exception { DataSetIterator iter = new CSVDataSetIterator(10,10,new ClassPathResource("csv-example.csv").getFile(),1,1); DataSet next = iter.next(); } }
package org.deeplearning4j.datasets.iterator; import org.nd4j.linalg.dataset.DataSet; import org.junit.Test; import org.springframework.core.io.ClassPathResource; import static org.junit.Assert.*; /** * CSV Test * @author Adam Gibson */ public class CSVDataSetIteratorTest { @Test public void testCSV() throws Exception { DataSetIterator iter = new CSVDataSetIterator(10, 10, new ClassPathResource("csv-example.csv").getFile(), 1, 1); DataSet next = iter.next(); assertEquals("", 10, next.numExamples()); assertEquals("", 479, next.numInputs()); } }
Make testCSV actually test something
Make testCSV actually test something Former-commit-id: 28e06f42df7e5c5a6fb0c7dcf4351fc582f1ed9a
Java
apache-2.0
huitseeker/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,huitseeker/deeplearning4j,huitseeker/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j,dmmiller612/deeplearning4j,huitseeker/deeplearning4j,dmmiller612/deeplearning4j,kinbod/deeplearning4j
java
## Code Before: package org.deeplearning4j.datasets.iterator; import org.nd4j.linalg.dataset.DataSet; import org.junit.Test; import org.springframework.core.io.ClassPathResource; /** * CSV Test * @author Adam Gibson */ public class CSVDataSetIteratorTest { @Test public void testCSV() throws Exception { DataSetIterator iter = new CSVDataSetIterator(10,10,new ClassPathResource("csv-example.csv").getFile(),1,1); DataSet next = iter.next(); } } ## Instruction: Make testCSV actually test something Former-commit-id: 28e06f42df7e5c5a6fb0c7dcf4351fc582f1ed9a ## Code After: package org.deeplearning4j.datasets.iterator; import org.nd4j.linalg.dataset.DataSet; import org.junit.Test; import org.springframework.core.io.ClassPathResource; import static org.junit.Assert.*; /** * CSV Test * @author Adam Gibson */ public class CSVDataSetIteratorTest { @Test public void testCSV() throws Exception { DataSetIterator iter = new CSVDataSetIterator(10, 10, new ClassPathResource("csv-example.csv").getFile(), 1, 1); DataSet next = iter.next(); assertEquals("", 10, next.numExamples()); assertEquals("", 479, next.numInputs()); } }
// ... existing code ... import org.nd4j.linalg.dataset.DataSet; import org.junit.Test; import org.springframework.core.io.ClassPathResource; import static org.junit.Assert.*; /** * CSV Test // ... modified code ... public class CSVDataSetIteratorTest { @Test public void testCSV() throws Exception { DataSetIterator iter = new CSVDataSetIterator(10, 10, new ClassPathResource("csv-example.csv").getFile(), 1, 1); DataSet next = iter.next(); assertEquals("", 10, next.numExamples()); assertEquals("", 479, next.numInputs()); } // ... rest of the code ...
855718c59f77594a0911f80592875daff05d8b8c
t_gc.h
t_gc.h
/* $Id: t_gc.h,v 1.7 2011/09/04 13:00:54 mit-sato Exp $ */ #ifndef __T_GC__ #define __T_GC__ #ifdef PROF # define GC_INIT() 0 # define GC_MALLOC(s) malloc(s) # define GC_MALLOC_ATOMIC(s) malloc(s) #else # include <gc.h> #endif /* PROF */ #endif /* __T_GC__ */
/* $Id: t_gc.h,v 1.7 2011/09/04 13:00:54 mit-sato Exp $ */ #ifndef __T_GC__ #define __T_GC__ #ifdef PROF # define GC_INIT() 0 # define GC_MALLOC(s) malloc(s) # define GC_MALLOC_ATOMIC(s) malloc(s) # define GC_register_finalizer_ignore_self(o,f,c,x,y) 0 # define GC_add_roots(s,e) 0 #else # include <gc.h> #endif /* PROF */ #endif /* __T_GC__ */
Add dummy macro GC_register_finalizer_ignore_self and GC_add_roots.
Add dummy macro GC_register_finalizer_ignore_self and GC_add_roots.
C
mit
mitchan0321/perfume,mitchan0321/perfume,mitchan0321/perfume,mitchan0321/perfume,mitchan0321/perfume
c
## Code Before: /* $Id: t_gc.h,v 1.7 2011/09/04 13:00:54 mit-sato Exp $ */ #ifndef __T_GC__ #define __T_GC__ #ifdef PROF # define GC_INIT() 0 # define GC_MALLOC(s) malloc(s) # define GC_MALLOC_ATOMIC(s) malloc(s) #else # include <gc.h> #endif /* PROF */ #endif /* __T_GC__ */ ## Instruction: Add dummy macro GC_register_finalizer_ignore_self and GC_add_roots. ## Code After: /* $Id: t_gc.h,v 1.7 2011/09/04 13:00:54 mit-sato Exp $ */ #ifndef __T_GC__ #define __T_GC__ #ifdef PROF # define GC_INIT() 0 # define GC_MALLOC(s) malloc(s) # define GC_MALLOC_ATOMIC(s) malloc(s) # define GC_register_finalizer_ignore_self(o,f,c,x,y) 0 # define GC_add_roots(s,e) 0 #else # include <gc.h> #endif /* PROF */ #endif /* __T_GC__ */
... #define __T_GC__ #ifdef PROF # define GC_INIT() 0 # define GC_MALLOC(s) malloc(s) # define GC_MALLOC_ATOMIC(s) malloc(s) # define GC_register_finalizer_ignore_self(o,f,c,x,y) 0 # define GC_add_roots(s,e) 0 #else # include <gc.h> #endif /* PROF */ ...
305ba7ee3fff41a7d866968c5332394301c0e83f
digi/wagtail_hooks.py
digi/wagtail_hooks.py
from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup)
from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection from django.utils.html import format_html from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) # Enable editing of raw HTML @hooks.register('insert_editor_js') def enable_source_editing(): return format_html( """ <script> registerHalloPlugin('hallohtml'); </script> """ )
Enable HTML source editing in the content editor
Enable HTML source editing in the content editor
Python
mit
terotic/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel,City-of-Helsinki/digihel,terotic/digihel,City-of-Helsinki/digihel
python
## Code Before: from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) ## Instruction: Enable HTML source editing in the content editor ## Code After: from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection from django.utils.html import format_html from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): model = Indicator menu_icon = 'user' class FooterLinkSectionAdmin(ModelAdmin): model = FooterLinkSection menu_icon = 'redirect' class DigiHelAdminGroup(ModelAdminGroup): label = "DigiHel" items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) # Enable editing of raw HTML @hooks.register('insert_editor_js') def enable_source_editing(): return format_html( """ <script> registerHalloPlugin('hallohtml'); </script> """ )
// ... existing code ... from wagtail.contrib.modeladmin.options import \ ModelAdmin, ModelAdminGroup, modeladmin_register from .models import Indicator, FooterLinkSection from django.utils.html import format_html from wagtail.wagtailcore import hooks class IndicatorAdmin(ModelAdmin): // ... modified code ... items = (IndicatorAdmin, FooterLinkSectionAdmin) modeladmin_register(DigiHelAdminGroup) # Enable editing of raw HTML @hooks.register('insert_editor_js') def enable_source_editing(): return format_html( """ <script> registerHalloPlugin('hallohtml'); </script> """ ) // ... rest of the code ...
acf3819d433f3ebc3d3eed17c61f2542f7429f8e
trimesh/resources/__init__.py
trimesh/resources/__init__.py
import os import inspect # find the current absolute path using inspect _pwd = os.path.dirname( os.path.abspath( inspect.getfile( inspect.currentframe()))) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
import os # find the current absolute path to this directory _pwd = os.path.dirname(__file__) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
Use __file__ instead of inspect, for compatibility with frozen environments
RF: Use __file__ instead of inspect, for compatibility with frozen environments
Python
mit
mikedh/trimesh,mikedh/trimesh,dajusc/trimesh,mikedh/trimesh,mikedh/trimesh,dajusc/trimesh
python
## Code Before: import os import inspect # find the current absolute path using inspect _pwd = os.path.dirname( os.path.abspath( inspect.getfile( inspect.currentframe()))) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource ## Instruction: RF: Use __file__ instead of inspect, for compatibility with frozen environments ## Code After: import os # find the current absolute path to this directory _pwd = os.path.dirname(__file__) def get_resource(name, decode=True): """ Get a resource from the trimesh/resources folder. Parameters ------------- name : str File path relative to `trimesh/resources` decode : bool Whether or not to decode result as UTF-8 Returns ------------- resource : str or bytes File data """ # get the resource using relative names with open(os.path.join(_pwd, name), 'rb') as f: resource = f.read() # make sure we return it as a string if asked if decode and hasattr(resource, 'decode'): return resource.decode('utf-8') return resource
# ... existing code ... import os # find the current absolute path to this directory _pwd = os.path.dirname(__file__) def get_resource(name, decode=True): # ... rest of the code ...
9c762d01b6dafd48d227c0ef927b844a257ff1b9
joommf/energies/test_demag.py
joommf/energies/test_demag.py
from demag import Demag def test_demag_mif(): demag = Demag() mif_string = demag.get_mif() assert 'Specify Oxs_Demag {}' in mif_string def test_demag_formatting(): demag = Demag() mif_string = demag.get_mif() assert mif_string[0] == 'S' assert mif_string[-1] == '\n' assert mif_string[-2] == '\n'
from demag import Demag def test_demag_mif(): demag = Demag() mif_string = demag.get_mif() assert 'Specify Oxs_Demag {}' in mif_string assert demag.__repr__() == "This is the energy class of type Demag" def test_demag_formatting(): demag = Demag() mif_string = demag.get_mif() assert mif_string[0] == 'S' assert mif_string[-1] == '\n' assert mif_string[-2] == '\n'
Increase test coverage for energy classes
Increase test coverage for energy classes
Python
bsd-2-clause
ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python
python
## Code Before: from demag import Demag def test_demag_mif(): demag = Demag() mif_string = demag.get_mif() assert 'Specify Oxs_Demag {}' in mif_string def test_demag_formatting(): demag = Demag() mif_string = demag.get_mif() assert mif_string[0] == 'S' assert mif_string[-1] == '\n' assert mif_string[-2] == '\n' ## Instruction: Increase test coverage for energy classes ## Code After: from demag import Demag def test_demag_mif(): demag = Demag() mif_string = demag.get_mif() assert 'Specify Oxs_Demag {}' in mif_string assert demag.__repr__() == "This is the energy class of type Demag" def test_demag_formatting(): demag = Demag() mif_string = demag.get_mif() assert mif_string[0] == 'S' assert mif_string[-1] == '\n' assert mif_string[-2] == '\n'
// ... existing code ... demag = Demag() mif_string = demag.get_mif() assert 'Specify Oxs_Demag {}' in mif_string assert demag.__repr__() == "This is the energy class of type Demag" def test_demag_formatting(): demag = Demag() // ... rest of the code ...
cea891a2de493ce211ccf13f4bf0c487f945985d
test_audio_files.py
test_audio_files.py
import fore.apikeys import fore.mixer import fore.database import pyechonest.track for file in fore.database.get_many_mp3(status='all'): print("Name: {} Length: {}".format(file.filename, file.track_details['length'])) track.track_from_filename('audio/'+file.filename, force_upload=True)
import fore.apikeys import fore.mixer import fore.database import pyechonest.track for file in fore.database.get_many_mp3(status='all'): print("Name: {} Length: {}".format(file.filename, file.track_details['length'])) track = track.track_from_filename('audio/'+file.filename, force_upload=True) print(track.id)
Print out the new track id.
Print out the new track id.
Python
artistic-2.0
Rosuav/appension,MikeiLL/appension,MikeiLL/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,Rosuav/appension
python
## Code Before: import fore.apikeys import fore.mixer import fore.database import pyechonest.track for file in fore.database.get_many_mp3(status='all'): print("Name: {} Length: {}".format(file.filename, file.track_details['length'])) track.track_from_filename('audio/'+file.filename, force_upload=True) ## Instruction: Print out the new track id. ## Code After: import fore.apikeys import fore.mixer import fore.database import pyechonest.track for file in fore.database.get_many_mp3(status='all'): print("Name: {} Length: {}".format(file.filename, file.track_details['length'])) track = track.track_from_filename('audio/'+file.filename, force_upload=True) print(track.id)
// ... existing code ... for file in fore.database.get_many_mp3(status='all'): print("Name: {} Length: {}".format(file.filename, file.track_details['length'])) track = track.track_from_filename('audio/'+file.filename, force_upload=True) print(track.id) // ... rest of the code ...
9d9704f631156e01d55d1d1217a41ab3704bdc03
tests/unit/test_context.py
tests/unit/test_context.py
import testtools from openstack.common import context class ContextTest(testtools.TestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx)
from openstack.common import context from tests import utils class ContextTest(utils.BaseTestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx)
Replace direct use of testtools BaseTestCase.
Replace direct use of testtools BaseTestCase. Using the BaseTestCase across the tests in the tree lets us put in log fixtures and consistently handle mox and stubout. Part of blueprint grizzly-testtools. Change-Id: Iba7eb2c63b0c514009b2c28e5930b27726a147b0
Python
apache-2.0
dims/oslo.context,JioCloud/oslo.context,citrix-openstack-build/oslo.context,varunarya10/oslo.context,openstack/oslo.context,yanheven/oslo.middleware
python
## Code Before: import testtools from openstack.common import context class ContextTest(testtools.TestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx) ## Instruction: Replace direct use of testtools BaseTestCase. Using the BaseTestCase across the tests in the tree lets us put in log fixtures and consistently handle mox and stubout. Part of blueprint grizzly-testtools. Change-Id: Iba7eb2c63b0c514009b2c28e5930b27726a147b0 ## Code After: from openstack.common import context from tests import utils class ContextTest(utils.BaseTestCase): def test_context(self): ctx = context.RequestContext() self.assertTrue(ctx)
// ... existing code ... from openstack.common import context from tests import utils class ContextTest(utils.BaseTestCase): def test_context(self): ctx = context.RequestContext() // ... rest of the code ...
da510e3156b1a92bc9139263f9e27e793dd6316c
importlib_metadata/abc.py
importlib_metadata/abc.py
from __future__ import absolute_import import abc import sys if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover from abc import ABCMeta as MetaPathFinder class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. """ @abc.abstractmethod def find_distributions(self, name=None, path=None): """ Return an iterable of all Distribution instances capable of loading the metadata for packages matching the name (or all names if not supplied) along the paths in the list of directories ``path`` (defaults to sys.path). """
from __future__ import absolute_import import abc import sys if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover class MetaPathFinder(object): __metaclass__ = abc.ABCMeta class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. """ @abc.abstractmethod def find_distributions(self, name=None, path=None): """ Return an iterable of all Distribution instances capable of loading the metadata for packages matching the name (or all names if not supplied) along the paths in the list of directories ``path`` (defaults to sys.path). """
Fix MetaPathFinder compatibility stub on Python 2.7
Fix MetaPathFinder compatibility stub on Python 2.7
Python
apache-2.0
python/importlib_metadata
python
## Code Before: from __future__ import absolute_import import abc import sys if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover from abc import ABCMeta as MetaPathFinder class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. """ @abc.abstractmethod def find_distributions(self, name=None, path=None): """ Return an iterable of all Distribution instances capable of loading the metadata for packages matching the name (or all names if not supplied) along the paths in the list of directories ``path`` (defaults to sys.path). """ ## Instruction: Fix MetaPathFinder compatibility stub on Python 2.7 ## Code After: from __future__ import absolute_import import abc import sys if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover class MetaPathFinder(object): __metaclass__ = abc.ABCMeta class DistributionFinder(MetaPathFinder): """ A MetaPathFinder capable of discovering installed distributions. """ @abc.abstractmethod def find_distributions(self, name=None, path=None): """ Return an iterable of all Distribution instances capable of loading the metadata for packages matching the name (or all names if not supplied) along the paths in the list of directories ``path`` (defaults to sys.path). """
// ... existing code ... if sys.version_info >= (3,): # pragma: nocover from importlib.abc import MetaPathFinder else: # pragma: nocover class MetaPathFinder(object): __metaclass__ = abc.ABCMeta class DistributionFinder(MetaPathFinder): // ... rest of the code ...
fccf3df85eb79ea7f270e454f5bb9eda162985f9
test_api_project/test_api_project/autocomplete_light_registry.py
test_api_project/test_api_project/autocomplete_light_registry.py
import autocomplete_light from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel from cities_light.models import City, Country class RemoteCountryChannel(RemoteCountryChannel): source_url = 'http://localhost:8000/cities_light/country/' class RemoteCityChannel(RemoteCityChannel): source_url = 'http://localhost:8000/cities_light/city/' autocomplete_light.register(Country, RemoteCountryChannel) autocomplete_light.register(City, RemoteCityChannel)
import autocomplete_light from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel from cities_light.models import City, Country autocomplete_light.register(Country, RemoteCountryChannel, source_url = 'http://localhost:8000/cities_light/country/') autocomplete_light.register(City, RemoteCityChannel, source_url = 'http://localhost:8000/cities_light/city/')
Update example to match current register signature, avoids subclassing
Update example to match current register signature, avoids subclassing
Python
mit
jonashaag/django-autocomplete-light,Visgean/django-autocomplete-light,dsanders11/django-autocomplete-light,Eraldo/django-autocomplete-light,jonashaag/django-autocomplete-light,dsanders11/django-autocomplete-light,luzfcb/django-autocomplete-light,spookylukey/django-autocomplete-light,Eraldo/django-autocomplete-light,blueyed/django-autocomplete-light,Perkville/django-autocomplete-light,jonashaag/django-autocomplete-light,Visgean/django-autocomplete-light,Eraldo/django-autocomplete-light,blueyed/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,dsanders11/django-autocomplete-light,Visgean/django-autocomplete-light,luzfcb/django-autocomplete-light,yourlabs/django-autocomplete-light,blueyed/django-autocomplete-light,Perkville/django-autocomplete-light,Perkville/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Eraldo/django-autocomplete-light,spookylukey/django-autocomplete-light,Visgean/django-autocomplete-light,spookylukey/django-autocomplete-light,yourlabs/django-autocomplete-light,shubhamdipt/django-autocomplete-light
python
## Code Before: import autocomplete_light from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel from cities_light.models import City, Country class RemoteCountryChannel(RemoteCountryChannel): source_url = 'http://localhost:8000/cities_light/country/' class RemoteCityChannel(RemoteCityChannel): source_url = 'http://localhost:8000/cities_light/city/' autocomplete_light.register(Country, RemoteCountryChannel) autocomplete_light.register(City, RemoteCityChannel) ## Instruction: Update example to match current register signature, avoids subclassing ## Code After: import autocomplete_light from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel from cities_light.models import City, Country autocomplete_light.register(Country, RemoteCountryChannel, source_url = 'http://localhost:8000/cities_light/country/') autocomplete_light.register(City, RemoteCityChannel, source_url = 'http://localhost:8000/cities_light/city/')
// ... existing code ... from cities_light.contrib.autocomplete_light_restframework import RemoteCountryChannel, RemoteCityChannel from cities_light.models import City, Country autocomplete_light.register(Country, RemoteCountryChannel, source_url = 'http://localhost:8000/cities_light/country/') autocomplete_light.register(City, RemoteCityChannel, source_url = 'http://localhost:8000/cities_light/city/') // ... rest of the code ...
34369635a22bf05abbabe47e708a2ed80db258e5
MeetingMinutes.py
MeetingMinutes.py
import sublime, sublime_plugin from .mistune import markdown class CreateMinuteCommand(sublime_plugin.TextCommand): def run(self, edit): region = sublime.Region(0, self.view.size()) md_source = self.view.substr(region) md_source.encode(encoding='UTF-8',errors='strict') html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>' print(html_source)
import sublime, sublime_plugin import os import re from subprocess import call from .mistune import markdown class CreateMinuteCommand(sublime_plugin.TextCommand): def run(self, edit): region = sublime.Region(0, self.view.size()) md_source = self.view.substr(region) md_source.encode(encoding='UTF-8',errors='strict') html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>' file_name = self.view.file_name() html_file, extension = os.path.splitext(file_name) html_file += ".html" with open(html_file, 'w+') as file_: file_.write(html_source) print(file_name) print(html_file)
Save the created html in a HTML file.
Save the created html in a HTML file.
Python
mit
Txarli/sublimetext-meeting-minutes,Txarli/sublimetext-meeting-minutes
python
## Code Before: import sublime, sublime_plugin from .mistune import markdown class CreateMinuteCommand(sublime_plugin.TextCommand): def run(self, edit): region = sublime.Region(0, self.view.size()) md_source = self.view.substr(region) md_source.encode(encoding='UTF-8',errors='strict') html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>' print(html_source) ## Instruction: Save the created html in a HTML file. ## Code After: import sublime, sublime_plugin import os import re from subprocess import call from .mistune import markdown class CreateMinuteCommand(sublime_plugin.TextCommand): def run(self, edit): region = sublime.Region(0, self.view.size()) md_source = self.view.substr(region) md_source.encode(encoding='UTF-8',errors='strict') html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>' file_name = self.view.file_name() html_file, extension = os.path.splitext(file_name) html_file += ".html" with open(html_file, 'w+') as file_: file_.write(html_source) print(file_name) print(html_file)
... import sublime, sublime_plugin import os import re from subprocess import call from .mistune import markdown class CreateMinuteCommand(sublime_plugin.TextCommand): def run(self, edit): ... md_source = self.view.substr(region) md_source.encode(encoding='UTF-8',errors='strict') html_source = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>' + markdown(md_source) + '</body></html>' file_name = self.view.file_name() html_file, extension = os.path.splitext(file_name) html_file += ".html" with open(html_file, 'w+') as file_: file_.write(html_source) print(file_name) print(html_file) ...
993c4c98fb9529946669b4d13e6c5a9ff4ab3f67
tests/test_mpi.py
tests/test_mpi.py
from mpi4py import MPI import pytest from devito import Grid, Function, Distributor @pytest.mark.parallel(nprocs=2) def test_hello_mpi(): size = MPI.COMM_WORLD.Get_size() rank = MPI.COMM_WORLD.Get_rank() name = MPI.Get_processor_name() print("Hello, World! I am rank %d of %d on %s" % (rank, size, name), flush=True) @pytest.mark.parallel(nprocs=2) def test_basic_partitioning(): grid = Grid(shape=(10, 10, 10)) # Gonna use a default distributor underneath f = Function(name='f', grid=grid) from IPython import embed; embed()
from mpi4py import MPI import pytest from devito import Grid, Function @pytest.mark.parallel(nprocs=2) def test_hello_mpi(): size = MPI.COMM_WORLD.Get_size() rank = MPI.COMM_WORLD.Get_rank() name = MPI.Get_processor_name() print("Hello, World! I am rank %d of %d on %s" % (rank, size, name), flush=True) @pytest.mark.parallel(nprocs=[2, 4]) def test_basic_partitioning(): grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath f = Function(name='f', grid=grid) distributor = grid._distributor expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...] 2: [(8, 15), (7, 15)], 4: [(8, 8), (8, 7), (7, 8), (7, 7)] } assert f.shape == expected[distributor.nprocs][distributor.rank]
Check domain decomposition over Functions
tests: Check domain decomposition over Functions
Python
mit
opesci/devito,opesci/devito
python
## Code Before: from mpi4py import MPI import pytest from devito import Grid, Function, Distributor @pytest.mark.parallel(nprocs=2) def test_hello_mpi(): size = MPI.COMM_WORLD.Get_size() rank = MPI.COMM_WORLD.Get_rank() name = MPI.Get_processor_name() print("Hello, World! I am rank %d of %d on %s" % (rank, size, name), flush=True) @pytest.mark.parallel(nprocs=2) def test_basic_partitioning(): grid = Grid(shape=(10, 10, 10)) # Gonna use a default distributor underneath f = Function(name='f', grid=grid) from IPython import embed; embed() ## Instruction: tests: Check domain decomposition over Functions ## Code After: from mpi4py import MPI import pytest from devito import Grid, Function @pytest.mark.parallel(nprocs=2) def test_hello_mpi(): size = MPI.COMM_WORLD.Get_size() rank = MPI.COMM_WORLD.Get_rank() name = MPI.Get_processor_name() print("Hello, World! I am rank %d of %d on %s" % (rank, size, name), flush=True) @pytest.mark.parallel(nprocs=[2, 4]) def test_basic_partitioning(): grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath f = Function(name='f', grid=grid) distributor = grid._distributor expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...] 2: [(8, 15), (7, 15)], 4: [(8, 8), (8, 7), (7, 8), (7, 7)] } assert f.shape == expected[distributor.nprocs][distributor.rank]
// ... existing code ... import pytest from devito import Grid, Function @pytest.mark.parallel(nprocs=2) // ... modified code ... print("Hello, World! I am rank %d of %d on %s" % (rank, size, name), flush=True) @pytest.mark.parallel(nprocs=[2, 4]) def test_basic_partitioning(): grid = Grid(shape=(15, 15)) # Gonna use a default distributor underneath f = Function(name='f', grid=grid) distributor = grid._distributor expected = { # nprocs -> [(rank0 shape), (rank1 shape), ...] 2: [(8, 15), (7, 15)], 4: [(8, 8), (8, 7), (7, 8), (7, 7)] } assert f.shape == expected[distributor.nprocs][distributor.rank] // ... rest of the code ...
fc94d60066692e6e8dc496bb854039bb66af3311
scout.py
scout.py
class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" def search(problem): print "Searching..." if (__name__ == '__main__'): problem = Problem(); search(problem)
class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" class SquareProblem(Problem): def __init__(self, size): self.size = size def getStartState(self): return (0, 0) def getEndState(self): return (self.size, self.size) def isValidState(self, state): return 0 <= state[0] <= self.size and 0 <= state[1] <= self.size def getSuccessors(self, state): return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]] def getStringRepr(self, state): return "(%d, %d)" % state def search(problem): print "Searching..." if (__name__ == '__main__'): problem = SquareProblem(2); search(problem)
Add a simple problem for testing
Add a simple problem for testing
Python
mit
SpexGuy/Scout
python
## Code Before: class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" def search(problem): print "Searching..." if (__name__ == '__main__'): problem = Problem(); search(problem) ## Instruction: Add a simple problem for testing ## Code After: class Problem: def getStartState(self): return None def getEndState(self): return None def isValidState(self, state): return False def getSuccessors(self, state): return [] def getStringRepr(self, state): return "BadProblem" class SquareProblem(Problem): def __init__(self, size): self.size = size def getStartState(self): return (0, 0) def getEndState(self): return (self.size, self.size) def isValidState(self, state): return 0 <= state[0] <= self.size and 0 <= state[1] <= self.size def getSuccessors(self, state): return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]] def getStringRepr(self, state): return "(%d, %d)" % state def search(problem): print "Searching..." if (__name__ == '__main__'): problem = SquareProblem(2); search(problem)
... return "BadProblem" class SquareProblem(Problem): def __init__(self, size): self.size = size def getStartState(self): return (0, 0) def getEndState(self): return (self.size, self.size) def isValidState(self, state): return 0 <= state[0] <= self.size and 0 <= state[1] <= self.size def getSuccessors(self, state): return [(state[0]+dx, state[1]+dy) for (dx, dy) in [(1, 0), (0, 1), (-1, 0), (0, -1)]] def getStringRepr(self, state): return "(%d, %d)" % state def search(problem): print "Searching..." ... if (__name__ == '__main__'): problem = SquareProblem(2); search(problem) ...
cdc43f6f6ee2d040675f10028af6372b0bf42a08
msmbuilder/tests/__init__.py
msmbuilder/tests/__init__.py
import sys import warnings from warnings import warn as orig_warn def my_warn(message, category=None, stacklevel=1): # taken from warnings module # Get context information try: caller = sys._getframe(stacklevel) except ValueError: globals = sys.__dict__ lineno = 1 else: globals = caller.f_globals lineno = caller.f_lineno module = globals['__name__'] filename = globals.get('__file__') m = { 'argspec': 'inspect.getargspec() is deprecated' } if module == 'scipy._lib.decorator' and m['argspec'] in message: return if module == 'mdtraj.formats.hdf5' and m['argspec'] in message: return if module == 'statsmodels.base.wrapper' and m['argspec'] in message: return if module == 'nose.util' and m['argspec'] in message: return print("Warning: module: ", module) print("Warning: message: ", message) return orig_warn(message=message, category=category, stacklevel=stacklevel + 1) warnings.warn = my_warn
import sys import warnings from warnings import warn as orig_warn def my_warn(message, category=None, stacklevel=1): # taken from warnings module # Get context information try: caller = sys._getframe(stacklevel) except ValueError: globals = sys.__dict__ lineno = 1 else: globals = caller.f_globals lineno = caller.f_lineno module = globals['__name__'] filename = globals.get('__file__') m = { 'argspec': 'inspect.getargspec() is deprecated' } if module == 'scipy._lib.decorator' and m['argspec'] in message: return if module == 'mdtraj.formats.hdf5' and m['argspec'] in message: return if module == 'statsmodels.base.wrapper' and m['argspec'] in message: return if module == 'nose.util' and m['argspec'] in message: return print("Warning: module: ", module) print("Warning: message: ", message) # This explicit check is necessary for python < 3.5 maybe?? if category is None: category = UserWarning return orig_warn(message=message, category=category, stacklevel=stacklevel + 1) warnings.warn = my_warn
Fix for my nefarious `warn` replacement
Fix for my nefarious `warn` replacement
Python
lgpl-2.1
dr-nate/msmbuilder,brookehus/msmbuilder,cxhernandez/msmbuilder,msmbuilder/msmbuilder,dr-nate/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,dr-nate/msmbuilder,msultan/msmbuilder,msmbuilder/msmbuilder,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,mpharrigan/mixtape,mpharrigan/mixtape,msmbuilder/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,stephenliu1989/msmbuilder,brookehus/msmbuilder,mpharrigan/mixtape,dr-nate/msmbuilder,cxhernandez/msmbuilder,mpharrigan/mixtape,peastman/msmbuilder,stephenliu1989/msmbuilder,Eigenstate/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder
python
## Code Before: import sys import warnings from warnings import warn as orig_warn def my_warn(message, category=None, stacklevel=1): # taken from warnings module # Get context information try: caller = sys._getframe(stacklevel) except ValueError: globals = sys.__dict__ lineno = 1 else: globals = caller.f_globals lineno = caller.f_lineno module = globals['__name__'] filename = globals.get('__file__') m = { 'argspec': 'inspect.getargspec() is deprecated' } if module == 'scipy._lib.decorator' and m['argspec'] in message: return if module == 'mdtraj.formats.hdf5' and m['argspec'] in message: return if module == 'statsmodels.base.wrapper' and m['argspec'] in message: return if module == 'nose.util' and m['argspec'] in message: return print("Warning: module: ", module) print("Warning: message: ", message) return orig_warn(message=message, category=category, stacklevel=stacklevel + 1) warnings.warn = my_warn ## Instruction: Fix for my nefarious `warn` replacement ## Code After: import sys import warnings from warnings import warn as orig_warn def my_warn(message, category=None, stacklevel=1): # taken from warnings module # Get context information try: caller = sys._getframe(stacklevel) except ValueError: globals = sys.__dict__ lineno = 1 else: globals = caller.f_globals lineno = caller.f_lineno module = globals['__name__'] filename = globals.get('__file__') m = { 'argspec': 'inspect.getargspec() is deprecated' } if module == 'scipy._lib.decorator' and m['argspec'] in message: return if module == 'mdtraj.formats.hdf5' and m['argspec'] in message: return if module == 'statsmodels.base.wrapper' and m['argspec'] in message: return if module == 'nose.util' and m['argspec'] in message: return print("Warning: module: ", module) print("Warning: message: ", message) # This explicit check is necessary for python < 3.5 maybe?? if category is None: category = UserWarning return orig_warn(message=message, category=category, stacklevel=stacklevel + 1) warnings.warn = my_warn
// ... existing code ... print("Warning: module: ", module) print("Warning: message: ", message) # This explicit check is necessary for python < 3.5 maybe?? if category is None: category = UserWarning return orig_warn(message=message, category=category, stacklevel=stacklevel + 1) // ... rest of the code ...
35d80ac6af0a546f138f6db31511e9dade7aae8e
feder/es_search/queries.py
feder/es_search/queries.py
from elasticsearch_dsl import Search, Index from elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis from elasticsearch_dsl.connections import get_connection, connections from .documents import LetterDocument def serialize_document(doc): return { "_id": doc.__dict__["meta"]["id"], "_index": doc.__dict__["meta"]["index"], } def search_keywords(query): q = MultiMatch(query=query, fields=["title", "body", "content"]) return LetterDocument.search().query(q).execute() def more_like_this(doc): like = serialize_document(doc) q = MoreLikeThis(like=like, fields=["title", "body"],) query = LetterDocument.search().query(q) print(query.to_dict()) x = query.execute() print(x) return x
from elasticsearch_dsl import Search, Index from elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis from elasticsearch_dsl.connections import get_connection, connections from .documents import LetterDocument def serialize_document(doc): return { "_id": doc.__dict__["meta"]["id"], "_index": doc.__dict__["meta"]["index"], } def search_keywords(query): q = MultiMatch(query=query, fields=["title", "body", "content"]) return LetterDocument.search().query(q).execute() def more_like_this(doc): like = serialize_document(doc) q = MoreLikeThis(like=like, fields=["title", "body"],) query = LetterDocument.search().query(q) # print(query.to_dict()) return query.execute()
Reduce debug logging in more_like_this
Reduce debug logging in more_like_this
Python
mit
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
python
## Code Before: from elasticsearch_dsl import Search, Index from elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis from elasticsearch_dsl.connections import get_connection, connections from .documents import LetterDocument def serialize_document(doc): return { "_id": doc.__dict__["meta"]["id"], "_index": doc.__dict__["meta"]["index"], } def search_keywords(query): q = MultiMatch(query=query, fields=["title", "body", "content"]) return LetterDocument.search().query(q).execute() def more_like_this(doc): like = serialize_document(doc) q = MoreLikeThis(like=like, fields=["title", "body"],) query = LetterDocument.search().query(q) print(query.to_dict()) x = query.execute() print(x) return x ## Instruction: Reduce debug logging in more_like_this ## Code After: from elasticsearch_dsl import Search, Index from elasticsearch_dsl.query import MultiMatch, Match, Q, MoreLikeThis from elasticsearch_dsl.connections import get_connection, connections from .documents import LetterDocument def serialize_document(doc): return { "_id": doc.__dict__["meta"]["id"], "_index": doc.__dict__["meta"]["index"], } def search_keywords(query): q = MultiMatch(query=query, fields=["title", "body", "content"]) return LetterDocument.search().query(q).execute() def more_like_this(doc): like = serialize_document(doc) q = MoreLikeThis(like=like, fields=["title", "body"],) query = LetterDocument.search().query(q) # print(query.to_dict()) return query.execute()
... like = serialize_document(doc) q = MoreLikeThis(like=like, fields=["title", "body"],) query = LetterDocument.search().query(q) # print(query.to_dict()) return query.execute() ...
b2bc77023ed3e19f6f7483645e2a11952c061de0
tests/registryd/test_registry_startup.py
tests/registryd/test_registry_startup.py
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible' def test_accessible_iface_properties(registry, session_manager): val = registry.Get(ACCESSIBLE_IFACE, 'Name', dbus_interface=PROPERTIES_IFACE) assert str(val) == 'main'
PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible' def get_property(proxy, iface_name, prop_name): return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE) def test_accessible_iface_properties(registry, session_manager): values = [ ('Name', 'main'), ('Description', ''), ] for prop_name, expected in values: assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
Test the Description property of the registry's root
Test the Description property of the registry's root
Python
lgpl-2.1
GNOME/at-spi2-core,GNOME/at-spi2-core,GNOME/at-spi2-core
python
## Code Before: PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible' def test_accessible_iface_properties(registry, session_manager): val = registry.Get(ACCESSIBLE_IFACE, 'Name', dbus_interface=PROPERTIES_IFACE) assert str(val) == 'main' ## Instruction: Test the Description property of the registry's root ## Code After: PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible' def get_property(proxy, iface_name, prop_name): return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE) def test_accessible_iface_properties(registry, session_manager): values = [ ('Name', 'main'), ('Description', ''), ] for prop_name, expected in values: assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected
// ... existing code ... PROPERTIES_IFACE = 'org.freedesktop.DBus.Properties' ACCESSIBLE_IFACE = 'org.a11y.atspi.Accessible' def get_property(proxy, iface_name, prop_name): return proxy.Get(iface_name, prop_name, dbus_interface=PROPERTIES_IFACE) def test_accessible_iface_properties(registry, session_manager): values = [ ('Name', 'main'), ('Description', ''), ] for prop_name, expected in values: assert get_property(registry, ACCESSIBLE_IFACE, prop_name) == expected // ... rest of the code ...
e9fc291faca8af35398b958d046e951aa8471cbf
apps/core/tests/test_factories.py
apps/core/tests/test_factories.py
from .. import factories from . import CoreFixturesTestCase class AnalysisFactoryTestCase(CoreFixturesTestCase): def test_new_factory_with_Experiments(self): experiments = factories.ExperimentFactory.create_batch(3) # build analysis = factories.AnalysisFactory.build(experiments=experiments) self.assertEqual(analysis.experiments.count(), 0) # create analysis = factories.AnalysisFactory(experiments=experiments) experiments_ids = list( analysis.experiments.values_list('id', flat=True) ) expected_experiments_ids = [e.id for e in experiments] self.assertEqual(experiments_ids, expected_experiments_ids)
from .. import factories, models from . import CoreFixturesTestCase class AnalysisFactoryTestCase(CoreFixturesTestCase): def test_new_factory_with_Experiments(self): experiments = factories.ExperimentFactory.create_batch(3) # build analysis = factories.AnalysisFactory.build(experiments=experiments) self.assertEqual(analysis.experiments.count(), 0) # create analysis = factories.AnalysisFactory(experiments=experiments) experiments_ids = analysis.experiments.values_list( 'id', flat=True ) expected_experiments_ids = models.Experiment.objects.values_list( 'id', flat=True ) self.assertEqual( list(experiments_ids), list(expected_experiments_ids) )
Fix broken test since models new default ordering
Fix broken test since models new default ordering
Python
bsd-3-clause
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
python
## Code Before: from .. import factories from . import CoreFixturesTestCase class AnalysisFactoryTestCase(CoreFixturesTestCase): def test_new_factory_with_Experiments(self): experiments = factories.ExperimentFactory.create_batch(3) # build analysis = factories.AnalysisFactory.build(experiments=experiments) self.assertEqual(analysis.experiments.count(), 0) # create analysis = factories.AnalysisFactory(experiments=experiments) experiments_ids = list( analysis.experiments.values_list('id', flat=True) ) expected_experiments_ids = [e.id for e in experiments] self.assertEqual(experiments_ids, expected_experiments_ids) ## Instruction: Fix broken test since models new default ordering ## Code After: from .. import factories, models from . import CoreFixturesTestCase class AnalysisFactoryTestCase(CoreFixturesTestCase): def test_new_factory_with_Experiments(self): experiments = factories.ExperimentFactory.create_batch(3) # build analysis = factories.AnalysisFactory.build(experiments=experiments) self.assertEqual(analysis.experiments.count(), 0) # create analysis = factories.AnalysisFactory(experiments=experiments) experiments_ids = analysis.experiments.values_list( 'id', flat=True ) expected_experiments_ids = models.Experiment.objects.values_list( 'id', flat=True ) self.assertEqual( list(experiments_ids), list(expected_experiments_ids) )
... from .. import factories, models from . import CoreFixturesTestCase ... # create analysis = factories.AnalysisFactory(experiments=experiments) experiments_ids = analysis.experiments.values_list( 'id', flat=True ) expected_experiments_ids = models.Experiment.objects.values_list( 'id', flat=True ) self.assertEqual( list(experiments_ids), list(expected_experiments_ids) ) ...
d21d090df0fe1d1daed089670b4df90c9aa9c126
nsu-connect/src/main/java/ru/tulupov/nsuconnect/database/loader/ChatLoader.java
nsu-connect/src/main/java/ru/tulupov/nsuconnect/database/loader/ChatLoader.java
package ru.tulupov.nsuconnect.database.loader; import android.content.Context; import android.support.v4.content.AsyncTaskLoader; import android.util.Log; import com.j256.ormlite.stmt.PreparedQuery; import com.j256.ormlite.stmt.QueryBuilder; import java.sql.SQLException; import java.util.List; import ru.tulupov.nsuconnect.database.DatabaseContract; import ru.tulupov.nsuconnect.database.HelperFactory; import ru.tulupov.nsuconnect.model.Chat; import ru.tulupov.nsuconnect.model.Message; public class ChatLoader extends AsyncTaskLoader<List<Chat>> { private static final String TAG = ChatLoader.class.getSimpleName(); public ChatLoader(Context context) { super(context); } @Override public List<Chat> loadInBackground() { try { QueryBuilder<Chat, Integer> queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder(); queryBuilder.orderBy(DatabaseContract.Chat.DATE, false); PreparedQuery<Chat> preparedQuery = queryBuilder.prepare(); List<Chat> chats = HelperFactory.getHelper().getChatDao().query(preparedQuery); return chats; } catch (SQLException e) { Log.e(TAG, "Error", e); } return null; } }
package ru.tulupov.nsuconnect.database.loader; import android.content.Context; import android.support.v4.content.AsyncTaskLoader; import android.util.Log; import com.j256.ormlite.stmt.PreparedQuery; import com.j256.ormlite.stmt.QueryBuilder; import java.sql.SQLException; import java.util.List; import ru.tulupov.nsuconnect.database.DatabaseContract; import ru.tulupov.nsuconnect.database.HelperFactory; import ru.tulupov.nsuconnect.model.Chat; import ru.tulupov.nsuconnect.model.Message; public class ChatLoader extends AsyncTaskLoader<List<Chat>> { private static final String TAG = ChatLoader.class.getSimpleName(); public ChatLoader(Context context) { super(context); } @Override public List<Chat> loadInBackground() { try { QueryBuilder<Chat, Integer> queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder(); queryBuilder.orderBy(DatabaseContract.Chat.DATE, false); queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false); PreparedQuery<Chat> preparedQuery = queryBuilder.prepare(); List<Chat> chats = HelperFactory.getHelper().getChatDao().query(preparedQuery); return chats; } catch (SQLException e) { Log.e(TAG, "Error", e); } return null; } }
Change sort order in chat list
Change sort order in chat list
Java
apache-2.0
etulupov/nsu-connect-android,etulupov/nsu-connect-android
java
## Code Before: package ru.tulupov.nsuconnect.database.loader; import android.content.Context; import android.support.v4.content.AsyncTaskLoader; import android.util.Log; import com.j256.ormlite.stmt.PreparedQuery; import com.j256.ormlite.stmt.QueryBuilder; import java.sql.SQLException; import java.util.List; import ru.tulupov.nsuconnect.database.DatabaseContract; import ru.tulupov.nsuconnect.database.HelperFactory; import ru.tulupov.nsuconnect.model.Chat; import ru.tulupov.nsuconnect.model.Message; public class ChatLoader extends AsyncTaskLoader<List<Chat>> { private static final String TAG = ChatLoader.class.getSimpleName(); public ChatLoader(Context context) { super(context); } @Override public List<Chat> loadInBackground() { try { QueryBuilder<Chat, Integer> queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder(); queryBuilder.orderBy(DatabaseContract.Chat.DATE, false); PreparedQuery<Chat> preparedQuery = queryBuilder.prepare(); List<Chat> chats = HelperFactory.getHelper().getChatDao().query(preparedQuery); return chats; } catch (SQLException e) { Log.e(TAG, "Error", e); } return null; } } ## Instruction: Change sort order in chat list ## Code After: package ru.tulupov.nsuconnect.database.loader; import android.content.Context; import android.support.v4.content.AsyncTaskLoader; import android.util.Log; import com.j256.ormlite.stmt.PreparedQuery; import com.j256.ormlite.stmt.QueryBuilder; import java.sql.SQLException; import java.util.List; import ru.tulupov.nsuconnect.database.DatabaseContract; import ru.tulupov.nsuconnect.database.HelperFactory; import ru.tulupov.nsuconnect.model.Chat; import ru.tulupov.nsuconnect.model.Message; public class ChatLoader extends AsyncTaskLoader<List<Chat>> { private static final String TAG = ChatLoader.class.getSimpleName(); public ChatLoader(Context context) { super(context); } @Override public List<Chat> loadInBackground() { try { QueryBuilder<Chat, Integer> queryBuilder = 
HelperFactory.getHelper().getChatDao().queryBuilder(); queryBuilder.orderBy(DatabaseContract.Chat.DATE, false); queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false); PreparedQuery<Chat> preparedQuery = queryBuilder.prepare(); List<Chat> chats = HelperFactory.getHelper().getChatDao().query(preparedQuery); return chats; } catch (SQLException e) { Log.e(TAG, "Error", e); } return null; } }
# ... existing code ... try { QueryBuilder<Chat, Integer> queryBuilder = HelperFactory.getHelper().getChatDao().queryBuilder(); queryBuilder.orderBy(DatabaseContract.Chat.DATE, false); queryBuilder.orderBy(DatabaseContract.Chat.ACTIVE_FLAG, false); PreparedQuery<Chat> preparedQuery = queryBuilder.prepare(); List<Chat> chats = HelperFactory.getHelper().getChatDao().query(preparedQuery); # ... rest of the code ...
922db591ca726acae07e2628119b95aa705f414c
leetcode/ds_string_word_pattern.py
leetcode/ds_string_word_pattern.py
''' Given a pattern and a string str, find if str follows the same pattern. Here follow means a full match, such that there is a bijection between a letter in pattern and a non-empty word in str. Examples: pattern = "abba", str = "dog cat cat dog" should return true. pattern = "abba", str = "dog cat cat fish" should return false. pattern = "aaaa", str = "dog cat cat dog" should return false. pattern = "abba", str = "dog dog dog dog" should return false. Notes: You may assume pattern contains only lowercase letters, and str contains lowercase letters separated by a single space. '''
''' Given a pattern and a string str, find if str follows the same pattern. Here follow means a full match, such that there is a bijection between a letter in pattern and a non-empty word in str. Examples: pattern = "abba", str = "dog cat cat dog" should return true. pattern = "abba", str = "dog cat cat fish" should return false. pattern = "aaaa", str = "dog cat cat dog" should return false. pattern = "abba", str = "dog dog dog dog" should return false. Notes: You may assume pattern contains only lowercase letters, and str contains lowercase letters separated by a single space. ''' # Approach 1: def wordPattern(self, pattern, str): clist = pattern #treat string as a list of chars wlist = str.split() #split string into a list of words # map(function, sequence): map applies the given function to every element in the sequence and returns a list # index - finds the index of the first occurence of every element in both list and string return map(clist.index, clist) == map(wlist.index, wlist) # Approach 2: def wordPattern(self, pattern, str): clist = pattern wlist = str.split() # zip returns a tuple, cpupling the ith elements from both lists return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist))) # "abba", "dog cat cat dog", True. # "abba", "dog cat cat fish" False. # "aaaa", "dog cat cat dog" False. # "abba", "dog dog dog dog" False.
Add two approaches for string word pattern
Add two approaches for string word pattern
Python
mit
ngovindaraj/Python
python
## Code Before: ''' Given a pattern and a string str, find if str follows the same pattern. Here follow means a full match, such that there is a bijection between a letter in pattern and a non-empty word in str. Examples: pattern = "abba", str = "dog cat cat dog" should return true. pattern = "abba", str = "dog cat cat fish" should return false. pattern = "aaaa", str = "dog cat cat dog" should return false. pattern = "abba", str = "dog dog dog dog" should return false. Notes: You may assume pattern contains only lowercase letters, and str contains lowercase letters separated by a single space. ''' ## Instruction: Add two approaches for string word pattern ## Code After: ''' Given a pattern and a string str, find if str follows the same pattern. Here follow means a full match, such that there is a bijection between a letter in pattern and a non-empty word in str. Examples: pattern = "abba", str = "dog cat cat dog" should return true. pattern = "abba", str = "dog cat cat fish" should return false. pattern = "aaaa", str = "dog cat cat dog" should return false. pattern = "abba", str = "dog dog dog dog" should return false. Notes: You may assume pattern contains only lowercase letters, and str contains lowercase letters separated by a single space. ''' # Approach 1: def wordPattern(self, pattern, str): clist = pattern #treat string as a list of chars wlist = str.split() #split string into a list of words # map(function, sequence): map applies the given function to every element in the sequence and returns a list # index - finds the index of the first occurence of every element in both list and string return map(clist.index, clist) == map(wlist.index, wlist) # Approach 2: def wordPattern(self, pattern, str): clist = pattern wlist = str.split() # zip returns a tuple, cpupling the ith elements from both lists return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist))) # "abba", "dog cat cat dog", True. 
# "abba", "dog cat cat fish" False. # "aaaa", "dog cat cat dog" False. # "abba", "dog dog dog dog" False.
// ... existing code ... lowercase letters separated by a single space. ''' # Approach 1: def wordPattern(self, pattern, str): clist = pattern #treat string as a list of chars wlist = str.split() #split string into a list of words # map(function, sequence): map applies the given function to every element in the sequence and returns a list # index - finds the index of the first occurence of every element in both list and string return map(clist.index, clist) == map(wlist.index, wlist) # Approach 2: def wordPattern(self, pattern, str): clist = pattern wlist = str.split() # zip returns a tuple, cpupling the ith elements from both lists return len(clist) == len(wlist) and len(set(clist)) == len(set(wlist)) == len(set(zip(clist, wlist))) # "abba", "dog cat cat dog", True. # "abba", "dog cat cat fish" False. # "aaaa", "dog cat cat dog" False. # "abba", "dog dog dog dog" False. // ... rest of the code ...
de441445dbdade4d937783626f1beeb9f439ee11
helpers.py
helpers.py
import feedparser import datetime from .models import RssEntry class RssSyncHelper(object): def __init__(self, feed): self.feed = feed def save_entry(self, result): pub_date = result.updated_parsed published = datetime.date(pub_date[0], pub_date[1], pub_date[2]) return RssEntry.objects.get_or_create( title=result.title, feed=self.feed, summary=result.content[0]['value'], link=result.link, date=published, ) def sync(self): feed = feedparser.parse(self.feed.url) for entry in feed.entries: self.save_entry(entry) def sync_wordpress_paginated(self, page): """Sync a Wordpress paginated feed""" feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page)) for entry in feed.entries: self.save_entry(entry)
import feedparser import datetime from .models import RssEntry def add_custom_acceptable_elements(elements): """ Add custom acceptable elements so iframes and other potential video elements will get synched. """ elements += list(feedparser._HTMLSanitizer.acceptable_elements) feedparser._HTMLSanitizer.acceptable_elements = set(elements) custom_acceptable_elements = ['iframe', 'embed', 'object',] add_custom_acceptable_elements(custom_acceptable_elements) class RssSyncHelper(object): def __init__(self, feed): self.feed = feed def save_entry(self, result): pub_date = result.updated_parsed published = datetime.date(pub_date[0], pub_date[1], pub_date[2]) return RssEntry.objects.get_or_create( title=result.title, feed=self.feed, summary=result.content[0]['value'], link=result.link, date=published, ) def sync(self): feed = feedparser.parse(self.feed.url) for entry in feed.entries: self.save_entry(entry) def sync_wordpress_paginated(self, page): """Sync a Wordpress paginated feed""" feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page)) for entry in feed.entries: self.save_entry(entry)
Allow iframes to be synched
Allow iframes to be synched
Python
bsd-3-clause
ebrelsford/django-rsssync
python
## Code Before: import feedparser import datetime from .models import RssEntry class RssSyncHelper(object): def __init__(self, feed): self.feed = feed def save_entry(self, result): pub_date = result.updated_parsed published = datetime.date(pub_date[0], pub_date[1], pub_date[2]) return RssEntry.objects.get_or_create( title=result.title, feed=self.feed, summary=result.content[0]['value'], link=result.link, date=published, ) def sync(self): feed = feedparser.parse(self.feed.url) for entry in feed.entries: self.save_entry(entry) def sync_wordpress_paginated(self, page): """Sync a Wordpress paginated feed""" feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page)) for entry in feed.entries: self.save_entry(entry) ## Instruction: Allow iframes to be synched ## Code After: import feedparser import datetime from .models import RssEntry def add_custom_acceptable_elements(elements): """ Add custom acceptable elements so iframes and other potential video elements will get synched. """ elements += list(feedparser._HTMLSanitizer.acceptable_elements) feedparser._HTMLSanitizer.acceptable_elements = set(elements) custom_acceptable_elements = ['iframe', 'embed', 'object',] add_custom_acceptable_elements(custom_acceptable_elements) class RssSyncHelper(object): def __init__(self, feed): self.feed = feed def save_entry(self, result): pub_date = result.updated_parsed published = datetime.date(pub_date[0], pub_date[1], pub_date[2]) return RssEntry.objects.get_or_create( title=result.title, feed=self.feed, summary=result.content[0]['value'], link=result.link, date=published, ) def sync(self): feed = feedparser.parse(self.feed.url) for entry in feed.entries: self.save_entry(entry) def sync_wordpress_paginated(self, page): """Sync a Wordpress paginated feed""" feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page)) for entry in feed.entries: self.save_entry(entry)
// ... existing code ... import datetime from .models import RssEntry def add_custom_acceptable_elements(elements): """ Add custom acceptable elements so iframes and other potential video elements will get synched. """ elements += list(feedparser._HTMLSanitizer.acceptable_elements) feedparser._HTMLSanitizer.acceptable_elements = set(elements) custom_acceptable_elements = ['iframe', 'embed', 'object',] add_custom_acceptable_elements(custom_acceptable_elements) class RssSyncHelper(object): // ... rest of the code ...
51b1f612ab8058da89cc8aaa6b1db99139c7eda0
versions/settings.py
versions/settings.py
from django.conf import settings from django.utils import importlib def import_from_string(val, setting_name): """ Attempt to import a class from a string representation. Based on the method of the same name in Django Rest Framework. """ try: parts = val.split('.') module_path, class_name = '.'.join(parts[:-1]), parts[-1] module = importlib.import_module(module_path) return getattr(module, class_name) except ImportError as e: raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format( (val, setting_name, e.__class__.__name__, e))) _cache = {} def get_versioned_delete_collector_class(): """ Gets the class to use for deletion collection. This is done as a method instead of just defining a module-level variable because Django doesn't like attributes of the django.conf.settings object to be accessed in top-level module scope. :return: class """ key = 'VERSIONED_DELETE_COLLECTOR' try: cls = _cache[key] except KeyError: cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key) _cache[key] = cls return cls
from django.conf import settings import importlib def import_from_string(val, setting_name): """ Attempt to import a class from a string representation. Based on the method of the same name in Django Rest Framework. """ try: parts = val.split('.') module_path, class_name = '.'.join(parts[:-1]), parts[-1] module = importlib.import_module(module_path) return getattr(module, class_name) except ImportError as e: raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format( (val, setting_name, e.__class__.__name__, e))) _cache = {} def get_versioned_delete_collector_class(): """ Gets the class to use for deletion collection. This is done as a method instead of just defining a module-level variable because Django doesn't like attributes of the django.conf.settings object to be accessed in top-level module scope. :return: class """ key = 'VERSIONED_DELETE_COLLECTOR' try: cls = _cache[key] except KeyError: cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key) _cache[key] = cls return cls
Use python 2.7+ standard importlib instead of deprecated django importlib
Use python 2.7+ standard importlib instead of deprecated django importlib
Python
apache-2.0
swisscom/cleanerversion,anfema/cleanerversion,anfema/cleanerversion,pretix/cleanerversion,pretix/cleanerversion,swisscom/cleanerversion,swisscom/cleanerversion,pretix/cleanerversion,anfema/cleanerversion
python
## Code Before: from django.conf import settings from django.utils import importlib def import_from_string(val, setting_name): """ Attempt to import a class from a string representation. Based on the method of the same name in Django Rest Framework. """ try: parts = val.split('.') module_path, class_name = '.'.join(parts[:-1]), parts[-1] module = importlib.import_module(module_path) return getattr(module, class_name) except ImportError as e: raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format( (val, setting_name, e.__class__.__name__, e))) _cache = {} def get_versioned_delete_collector_class(): """ Gets the class to use for deletion collection. This is done as a method instead of just defining a module-level variable because Django doesn't like attributes of the django.conf.settings object to be accessed in top-level module scope. :return: class """ key = 'VERSIONED_DELETE_COLLECTOR' try: cls = _cache[key] except KeyError: cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key) _cache[key] = cls return cls ## Instruction: Use python 2.7+ standard importlib instead of deprecated django importlib ## Code After: from django.conf import settings import importlib def import_from_string(val, setting_name): """ Attempt to import a class from a string representation. Based on the method of the same name in Django Rest Framework. """ try: parts = val.split('.') module_path, class_name = '.'.join(parts[:-1]), parts[-1] module = importlib.import_module(module_path) return getattr(module, class_name) except ImportError as e: raise ImportError("Could not import '{}' for CleanerVersion setting '{}'. {}: {}.".format( (val, setting_name, e.__class__.__name__, e))) _cache = {} def get_versioned_delete_collector_class(): """ Gets the class to use for deletion collection. 
This is done as a method instead of just defining a module-level variable because Django doesn't like attributes of the django.conf.settings object to be accessed in top-level module scope. :return: class """ key = 'VERSIONED_DELETE_COLLECTOR' try: cls = _cache[key] except KeyError: cls = import_from_string(getattr(settings, key, 'versions.deletion.VersionedCollector'), key) _cache[key] = cls return cls
// ... existing code ... from django.conf import settings import importlib def import_from_string(val, setting_name): // ... rest of the code ...
a89b6ec1bda46c63c0ff0e0a8bb44eb3eda41c1b
repo_health/gh_issues/serializers/GhIssueStatsSerializer.py
repo_health/gh_issues/serializers/GhIssueStatsSerializer.py
from rest_framework import serializers as s from ..models import GhIssueEvent from repo_health.index.mixins import CountForPastYearMixin class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin): _label_names = None issues_count = s.SerializerMethodField() issues_closed_last_year = s.SerializerMethodField() issues_opened_last_year = s.SerializerMethodField() merged_count = s.SerializerMethodField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) repo = args[0] self._label_names = repo.labels.values_list('name', flat=True) def get_issues_count(self, repo): return repo.issues_count def get_issues_closed_last_year(self, repo): return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct()) def get_issues_opened_last_year(self, repo): return self.get_count_list_for_year(repo.issues)
from rest_framework import serializers as s from ..models import GhIssueEvent from repo_health.index.mixins import CountForPastYearMixin class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin): _label_names = None issues_count = s.SerializerMethodField() issues_closed_last_year = s.SerializerMethodField() issues_opened_last_year = s.SerializerMethodField() merged_count = s.SerializerMethodField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) repo = args[0] self._label_names = repo.labels.values_list('name', flat=True) def get_issues_count(self, repo): return repo.issues_count def get_issues_closed_last_year(self, repo): return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct()) def get_issues_opened_last_year(self, repo): return self.get_count_list_for_year(repo.issues) def get_merged_count(self, repo): return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
Add get merged count method.
Add get merged count method.
Python
mit
jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health,jakeharding/repo-health
python
## Code Before: from rest_framework import serializers as s from ..models import GhIssueEvent from repo_health.index.mixins import CountForPastYearMixin class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin): _label_names = None issues_count = s.SerializerMethodField() issues_closed_last_year = s.SerializerMethodField() issues_opened_last_year = s.SerializerMethodField() merged_count = s.SerializerMethodField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) repo = args[0] self._label_names = repo.labels.values_list('name', flat=True) def get_issues_count(self, repo): return repo.issues_count def get_issues_closed_last_year(self, repo): return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct()) def get_issues_opened_last_year(self, repo): return self.get_count_list_for_year(repo.issues) ## Instruction: Add get merged count method. ## Code After: from rest_framework import serializers as s from ..models import GhIssueEvent from repo_health.index.mixins import CountForPastYearMixin class GhIssueStatsSerializer(s.Serializer, CountForPastYearMixin): _label_names = None issues_count = s.SerializerMethodField() issues_closed_last_year = s.SerializerMethodField() issues_opened_last_year = s.SerializerMethodField() merged_count = s.SerializerMethodField() def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) repo = args[0] self._label_names = repo.labels.values_list('name', flat=True) def get_issues_count(self, repo): return repo.issues_count def get_issues_closed_last_year(self, repo): return self.get_count_list_for_year(repo.issues.filter(events__action=GhIssueEvent.CLOSED_ACTION).distinct()) def get_issues_opened_last_year(self, repo): return self.get_count_list_for_year(repo.issues) def get_merged_count(self, repo): return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count()
# ... existing code ... def get_issues_opened_last_year(self, repo): return self.get_count_list_for_year(repo.issues) def get_merged_count(self, repo): return repo.issues.filter(events__action=GhIssueEvent.MERGED_ACTION).count() # ... rest of the code ...
b0ffd67ae29d88641bb8d90b577eafe847aa5eff
OTCAnalyser/src/uk/ac/cam/cstibhotel/otcanalyser/gui/GUI.java
OTCAnalyser/src/uk/ac/cam/cstibhotel/otcanalyser/gui/GUI.java
package uk.ac.cam.cstibhotel.otcanalyser.gui; import java.awt.BorderLayout; import javax.swing.JFrame; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult; public class GUI extends JFrame implements SearchListener { private static final long serialVersionUID = 1L; public StatusBar statusBar; public static GUI gui; static SearchWindow searchWindow; DataViewer dataViewer; public static GUI getInstance() { if (gui==null) { gui = new GUI(); } return gui; } public GUI() { setTitle("OTC Analyser"); setSize(1000,600); setDefaultCloseOperation(EXIT_ON_CLOSE); searchWindow = SearchWindow.getInstance(); add(searchWindow,BorderLayout.WEST); searchWindow.setVisible(true); statusBar = StatusBar.getInstance(); add(statusBar,BorderLayout.SOUTH); statusBar.setVisible(true); dataViewer = DataViewer.dataViewer; this.add(dataViewer); dataViewer.setVisible(true); this.setVisible(true); } @Override public void getSearchResult(SearchResult s) { DataViewer.addTrades(s.getResultData()); } }
package uk.ac.cam.cstibhotel.otcanalyser.gui; import java.awt.BorderLayout; import javax.swing.JFrame; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult; public class GUI extends JFrame implements SearchListener { private static final long serialVersionUID = 1L; public StatusBar statusBar; public static GUI gui; static SearchWindow searchWindow; DataViewer dataViewer; public static GUI getInstance() { if (gui==null) { gui = new GUI(); } return gui; } public GUI() { setTitle("OTC Analyser"); setSize(1000,600); setDefaultCloseOperation(EXIT_ON_CLOSE); searchWindow = SearchWindow.getInstance(); add(searchWindow,BorderLayout.WEST); searchWindow.setVisible(true); statusBar = StatusBar.getInstance(); add(statusBar,BorderLayout.SOUTH); statusBar.setVisible(true); dataViewer = DataViewer.dataViewer; this.add(dataViewer); dataViewer.setVisible(true); this.setVisible(true); } @Override public void getSearchResult(SearchResult s) { DataViewer.clearTrades(); DataViewer.addTrades(s.getResultData()); } }
Clear trades from old search from data viewer before adding new ones
Clear trades from old search from data viewer before adding new ones
Java
mit
CSTIB-Hotel/OTCAnalyser
java
## Code Before: package uk.ac.cam.cstibhotel.otcanalyser.gui; import java.awt.BorderLayout; import javax.swing.JFrame; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult; public class GUI extends JFrame implements SearchListener { private static final long serialVersionUID = 1L; public StatusBar statusBar; public static GUI gui; static SearchWindow searchWindow; DataViewer dataViewer; public static GUI getInstance() { if (gui==null) { gui = new GUI(); } return gui; } public GUI() { setTitle("OTC Analyser"); setSize(1000,600); setDefaultCloseOperation(EXIT_ON_CLOSE); searchWindow = SearchWindow.getInstance(); add(searchWindow,BorderLayout.WEST); searchWindow.setVisible(true); statusBar = StatusBar.getInstance(); add(statusBar,BorderLayout.SOUTH); statusBar.setVisible(true); dataViewer = DataViewer.dataViewer; this.add(dataViewer); dataViewer.setVisible(true); this.setVisible(true); } @Override public void getSearchResult(SearchResult s) { DataViewer.addTrades(s.getResultData()); } } ## Instruction: Clear trades from old search from data viewer before adding new ones ## Code After: package uk.ac.cam.cstibhotel.otcanalyser.gui; import java.awt.BorderLayout; import javax.swing.JFrame; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchListener; import uk.ac.cam.cstibhotel.otcanalyser.communicationlayer.SearchResult; public class GUI extends JFrame implements SearchListener { private static final long serialVersionUID = 1L; public StatusBar statusBar; public static GUI gui; static SearchWindow searchWindow; DataViewer dataViewer; public static GUI getInstance() { if (gui==null) { gui = new GUI(); } return gui; } public GUI() { setTitle("OTC Analyser"); setSize(1000,600); setDefaultCloseOperation(EXIT_ON_CLOSE); searchWindow = SearchWindow.getInstance(); add(searchWindow,BorderLayout.WEST); searchWindow.setVisible(true); statusBar = StatusBar.getInstance(); 
add(statusBar,BorderLayout.SOUTH); statusBar.setVisible(true); dataViewer = DataViewer.dataViewer; this.add(dataViewer); dataViewer.setVisible(true); this.setVisible(true); } @Override public void getSearchResult(SearchResult s) { DataViewer.clearTrades(); DataViewer.addTrades(s.getResultData()); } }
# ... existing code ... @Override public void getSearchResult(SearchResult s) { DataViewer.clearTrades(); DataViewer.addTrades(s.getResultData()); } # ... rest of the code ...
c32e87894d4baf404d5b300459fc68a6d9d973c8
zun/db/__init__.py
zun/db/__init__.py
from oslo_db import options from zun.common import paths import zun.conf _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite') options.set_defaults(zun.conf.CONF) options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite')
from oslo_db import options from zun.common import paths import zun.conf _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite') options.set_defaults(zun.conf.CONF) options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
Remove the duplicated config sqlite_db
Remove the duplicated config sqlite_db The config sqlite_db has been removed from oslo.db. See here: https://review.openstack.org/#/c/449437/ Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff
Python
apache-2.0
kevin-zhaoshuai/zun,kevin-zhaoshuai/zun,kevin-zhaoshuai/zun
python
## Code Before: from oslo_db import options from zun.common import paths import zun.conf _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite') options.set_defaults(zun.conf.CONF) options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION, 'zun.sqlite') ## Instruction: Remove the duplicated config sqlite_db The config sqlite_db has been removed from oslo.db. See here: https://review.openstack.org/#/c/449437/ Change-Id: I9197b08aeb7baabf2d3fdd4cf4bd06b57a6782ff ## Code After: from oslo_db import options from zun.common import paths import zun.conf _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite') options.set_defaults(zun.conf.CONF) options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION)
... _DEFAULT_SQL_CONNECTION = 'sqlite:///' + paths.state_path_def('zun.sqlite') options.set_defaults(zun.conf.CONF) options.set_defaults(zun.conf.CONF, _DEFAULT_SQL_CONNECTION) ...
162e975f5d1e7d435f1fb77356751e94cc81da6d
inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/tab/RememberingTabbedPanel.java
inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/tab/RememberingTabbedPanel.java
package lt.inventi.wicket.component.bootstrap.tab; import java.util.List; import org.apache.wicket.extensions.markup.html.tabs.ITab; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.model.IModel; import org.apache.wicket.util.string.StringValue; import de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel; public class RememberingTabbedPanel<T extends ITab> extends BootstrapTabbedPanel<T> { public RememberingTabbedPanel(String id, List<T> tabs, IModel<Integer> model) { super(id, tabs, model); } public RememberingTabbedPanel(String id, List<T> tabs) { super(id, tabs); } @Override protected void onInitialize() { super.onInitialize(); StringValue selectedTabId = getPage().getPageParameters().get(getId()); if (selectedTabId.isEmpty()) { setSelectedTab(0); } else { setSelectedTab(selectedTabId.toInt()); } } @Override protected WebMarkupContainer newLink(String linkId, final int index) { return new Link<Void>(linkId) { @Override public void onClick() { setSelectedTab(index); getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index)); } }; } }
package lt.inventi.wicket.component.bootstrap.tab; import java.util.Arrays; import java.util.List; import org.apache.wicket.extensions.markup.html.tabs.ITab; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.model.IModel; import org.apache.wicket.util.string.StringValue; import de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel; public class RememberingTabbedPanel<T extends ITab> extends BootstrapTabbedPanel<T> { public RememberingTabbedPanel(String id, List<T> tabs, IModel<Integer> model) { super(id, tabs, model); } public RememberingTabbedPanel(String id, List<T> tabs) { super(id, tabs); } public RememberingTabbedPanel(String id, T... tabs) { super(id, Arrays.asList(tabs)); } @Override protected void onInitialize() { super.onInitialize(); StringValue selectedTabId = getPage().getPageParameters().get(getId()); if (selectedTabId.isEmpty()) { setSelectedTab(0); } else { setSelectedTab(selectedTabId.toInt()); } } @Override protected WebMarkupContainer newLink(String linkId, final int index) { return new Link<Void>(linkId) { @Override public void onClick() { setSelectedTab(index); getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index)); } }; } }
Add a vararg constructor for the Tabbed Panel
Add a vararg constructor for the Tabbed Panel
Java
apache-2.0
inventiLT/inventi-wicket,inventiLT/inventi-wicket,inventiLT/inventi-wicket
java
## Code Before: package lt.inventi.wicket.component.bootstrap.tab; import java.util.List; import org.apache.wicket.extensions.markup.html.tabs.ITab; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.model.IModel; import org.apache.wicket.util.string.StringValue; import de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel; public class RememberingTabbedPanel<T extends ITab> extends BootstrapTabbedPanel<T> { public RememberingTabbedPanel(String id, List<T> tabs, IModel<Integer> model) { super(id, tabs, model); } public RememberingTabbedPanel(String id, List<T> tabs) { super(id, tabs); } @Override protected void onInitialize() { super.onInitialize(); StringValue selectedTabId = getPage().getPageParameters().get(getId()); if (selectedTabId.isEmpty()) { setSelectedTab(0); } else { setSelectedTab(selectedTabId.toInt()); } } @Override protected WebMarkupContainer newLink(String linkId, final int index) { return new Link<Void>(linkId) { @Override public void onClick() { setSelectedTab(index); getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index)); } }; } } ## Instruction: Add a vararg constructor for the Tabbed Panel ## Code After: package lt.inventi.wicket.component.bootstrap.tab; import java.util.Arrays; import java.util.List; import org.apache.wicket.extensions.markup.html.tabs.ITab; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.link.Link; import org.apache.wicket.model.IModel; import org.apache.wicket.util.string.StringValue; import de.agilecoders.wicket.markup.html.bootstrap.tabs.BootstrapTabbedPanel; public class RememberingTabbedPanel<T extends ITab> extends BootstrapTabbedPanel<T> { public RememberingTabbedPanel(String id, List<T> tabs, IModel<Integer> model) { super(id, tabs, model); } public RememberingTabbedPanel(String id, List<T> tabs) { super(id, tabs); } public 
RememberingTabbedPanel(String id, T... tabs) { super(id, Arrays.asList(tabs)); } @Override protected void onInitialize() { super.onInitialize(); StringValue selectedTabId = getPage().getPageParameters().get(getId()); if (selectedTabId.isEmpty()) { setSelectedTab(0); } else { setSelectedTab(selectedTabId.toInt()); } } @Override protected WebMarkupContainer newLink(String linkId, final int index) { return new Link<Void>(linkId) { @Override public void onClick() { setSelectedTab(index); getPage().getPageParameters().set(RememberingTabbedPanel.this.getId(), String.valueOf(index)); } }; } }
// ... existing code ... package lt.inventi.wicket.component.bootstrap.tab; import java.util.Arrays; import java.util.List; import org.apache.wicket.extensions.markup.html.tabs.ITab; // ... modified code ... public RememberingTabbedPanel(String id, List<T> tabs) { super(id, tabs); } public RememberingTabbedPanel(String id, T... tabs) { super(id, Arrays.asList(tabs)); } @Override // ... rest of the code ...
3a28eac6c49bdf2acfc5bde4ed7f6e317f39f3ab
ui-tests/src/test/java/io/syndesis/qe/steps/other/DropBoxSteps.java
ui-tests/src/test/java/io/syndesis/qe/steps/other/DropBoxSteps.java
package io.syndesis.qe.steps.other; import com.dropbox.core.DbxException; import cucumber.api.java.en.When; import io.syndesis.qe.utils.DropBoxUtils; import org.assertj.core.api.Assertions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import java.io.IOException; import java.util.concurrent.TimeoutException; public class DropBoxSteps { @Lazy @Autowired private DropBoxUtils dropBoxUtils; @When("^.*uploads? file with path \"([^\"]*)\" and content \"([^\"]*)\" on Dropbox$") public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException { dropBoxUtils.uploadFile(filePath, content); } @When("^.*checks? that file with path \"([^\"]*)\" exists? on Dropbox$") public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException { Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isTrue(); } @When("^.*deletes? file with path \"([^\"]*)\" from Dropbox$") public void deleteFile(String filePath) throws DbxException { dropBoxUtils.deleteFile(filePath); Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse(); } }
package io.syndesis.qe.steps.other; import com.dropbox.core.DbxException; import cucumber.api.java.en.When; import io.syndesis.qe.utils.DropBoxUtils; import io.syndesis.qe.utils.TestUtils; import org.assertj.core.api.Assertions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import java.io.IOException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class DropBoxSteps { @Lazy @Autowired private DropBoxUtils dropBoxUtils; @When("^.*uploads? file with path \"([^\"]*)\" and content \"([^\"]*)\" on Dropbox$") public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException { dropBoxUtils.uploadFile(filePath, content); } @When("^.*checks? that file with path \"([^\"]*)\" exists? on Dropbox$") public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException { boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15); Assertions.assertThat(fileExists).isTrue(); } @When("^.*deletes? file with path \"([^\"]*)\" from Dropbox$") public void deleteFile(String filePath) throws DbxException { dropBoxUtils.deleteFile(filePath); Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse(); } }
Add wait for file to appear in Dropbox
fix(ui): Add wait for file to appear in Dropbox
Java
apache-2.0
mcada/syndesis-qe,mcada/syndesis-qe,mcada/syndesis-qe
java
## Code Before: package io.syndesis.qe.steps.other; import com.dropbox.core.DbxException; import cucumber.api.java.en.When; import io.syndesis.qe.utils.DropBoxUtils; import org.assertj.core.api.Assertions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import java.io.IOException; import java.util.concurrent.TimeoutException; public class DropBoxSteps { @Lazy @Autowired private DropBoxUtils dropBoxUtils; @When("^.*uploads? file with path \"([^\"]*)\" and content \"([^\"]*)\" on Dropbox$") public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException { dropBoxUtils.uploadFile(filePath, content); } @When("^.*checks? that file with path \"([^\"]*)\" exists? on Dropbox$") public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException { Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isTrue(); } @When("^.*deletes? file with path \"([^\"]*)\" from Dropbox$") public void deleteFile(String filePath) throws DbxException { dropBoxUtils.deleteFile(filePath); Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse(); } } ## Instruction: fix(ui): Add wait for file to appear in Dropbox ## Code After: package io.syndesis.qe.steps.other; import com.dropbox.core.DbxException; import cucumber.api.java.en.When; import io.syndesis.qe.utils.DropBoxUtils; import io.syndesis.qe.utils.TestUtils; import org.assertj.core.api.Assertions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import java.io.IOException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class DropBoxSteps { @Lazy @Autowired private DropBoxUtils dropBoxUtils; @When("^.*uploads? 
file with path \"([^\"]*)\" and content \"([^\"]*)\" on Dropbox$") public void uploadFile(String filePath, String content) throws IOException, DbxException, TimeoutException, InterruptedException { dropBoxUtils.uploadFile(filePath, content); } @When("^.*checks? that file with path \"([^\"]*)\" exists? on Dropbox$") public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException { boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15); Assertions.assertThat(fileExists).isTrue(); } @When("^.*deletes? file with path \"([^\"]*)\" from Dropbox$") public void deleteFile(String filePath) throws DbxException { dropBoxUtils.deleteFile(filePath); Assertions.assertThat(dropBoxUtils.checkIfFileExists(filePath)).isFalse(); } }
# ... existing code ... import com.dropbox.core.DbxException; import cucumber.api.java.en.When; import io.syndesis.qe.utils.DropBoxUtils; import io.syndesis.qe.utils.TestUtils; import org.assertj.core.api.Assertions; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Lazy; import java.io.IOException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; public class DropBoxSteps { # ... modified code ... @When("^.*checks? that file with path \"([^\"]*)\" exists? on Dropbox$") public void checkThatFileExists(String filePath) throws IOException, DbxException, TimeoutException, InterruptedException { boolean fileExists = TestUtils.waitForEvent(r -> r, () -> dropBoxUtils.checkIfFileExists(filePath), TimeUnit.MINUTES, 2, TimeUnit.SECONDS, 15); Assertions.assertThat(fileExists).isTrue(); } @When("^.*deletes? file with path \"([^\"]*)\" from Dropbox$") # ... rest of the code ...
8e7eb5dec20ee75d34b566341af3c22b57503dcb
setup.py
setup.py
from setuptools import setup setup( name="setquery", version="0.1", description="Set arithmetic evaluator", author="Paul Scott", author_email="[email protected]", url="https://github.com/icio/setquery", download_url="https://github.com/icio/setquery/tarball/0.1", setup_requires=["nose", "rednose"], py_modules=["setquery"], license="MIT", keywords=['set', 'expression', 'eval', 'evaluate'], classifiers=[], )
from setuptools import setup setup( name="setquery", version="0.1", description="Set arithmetic evaluator", author="Paul Scott", author_email="[email protected]", url="https://github.com/icio/setquery", download_url="https://github.com/icio/setquery/tarball/0.1", setup_requires=["nose", "rednose"], py_modules=["setquery", "test_setquery"], license="MIT", keywords=['set', 'expression', 'eval', 'evaluate'], classifiers=[], )
Include test_setquery module in distribution
Include test_setquery module in distribution
Python
mit
icio/evil
python
## Code Before: from setuptools import setup setup( name="setquery", version="0.1", description="Set arithmetic evaluator", author="Paul Scott", author_email="[email protected]", url="https://github.com/icio/setquery", download_url="https://github.com/icio/setquery/tarball/0.1", setup_requires=["nose", "rednose"], py_modules=["setquery"], license="MIT", keywords=['set', 'expression', 'eval', 'evaluate'], classifiers=[], ) ## Instruction: Include test_setquery module in distribution ## Code After: from setuptools import setup setup( name="setquery", version="0.1", description="Set arithmetic evaluator", author="Paul Scott", author_email="[email protected]", url="https://github.com/icio/setquery", download_url="https://github.com/icio/setquery/tarball/0.1", setup_requires=["nose", "rednose"], py_modules=["setquery", "test_setquery"], license="MIT", keywords=['set', 'expression', 'eval', 'evaluate'], classifiers=[], )
... url="https://github.com/icio/setquery", download_url="https://github.com/icio/setquery/tarball/0.1", setup_requires=["nose", "rednose"], py_modules=["setquery", "test_setquery"], license="MIT", keywords=['set', 'expression', 'eval', 'evaluate'], classifiers=[], ...
7350422a1364f996b7ac362e8457e2a5e04afc7c
sympy/interactive/tests/test_ipythonprinting.py
sympy/interactive/tests/test_ipythonprinting.py
"""Tests that the IPython printing module is properly loaded. """ from sympy.interactive.session import init_ipython_session from sympy.external import import_module ipython = import_module("IPython", min_module_version="0.11") # disable tests if ipython is not present if not ipython: disabled = True def test_ipythonprinting(): # Initialize and setup IPython session app = init_ipython_session() app.run_cell("from IPython.core.interactiveshell import InteractiveShell") app.run_cell("inst = InteractiveShell.instance()") app.run_cell("format = inst.display_formatter.format") app.run_cell("from sympy import Symbol") # Printing without printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == "pi" # Load printing extension app.run_cell("%load_ext sympy.interactive.ipythonprinting") # Printing with printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == u'\u03c0'
"""Tests that the IPython printing module is properly loaded. """ from sympy.interactive.session import init_ipython_session from sympy.external import import_module ipython = import_module("IPython", min_module_version="0.11") # disable tests if ipython is not present if not ipython: disabled = True def test_ipythonprinting(): # Initialize and setup IPython session app = init_ipython_session() app.run_cell("ip = get_ipython()") app.run_cell("inst = ip.instance()") app.run_cell("format = inst.display_formatter.format") app.run_cell("from sympy import Symbol") # Printing without printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == "pi" # Load printing extension app.run_cell("%load_ext sympy.interactive.ipythonprinting") # Printing with printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == u'\u03c0'
Make ipythonprinting test more robust
Make ipythonprinting test more robust
Python
bsd-3-clause
Vishluck/sympy,Mitchkoens/sympy,Davidjohnwilson/sympy,pandeyadarsh/sympy,hrashk/sympy,Davidjohnwilson/sympy,sahmed95/sympy,yukoba/sympy,Sumith1896/sympy,jamesblunt/sympy,moble/sympy,chaffra/sympy,Mitchkoens/sympy,Shaswat27/sympy,saurabhjn76/sympy,abhiii5459/sympy,jerli/sympy,jaimahajan1997/sympy,ahhda/sympy,sunny94/temp,wanglongqi/sympy,meghana1995/sympy,jaimahajan1997/sympy,lindsayad/sympy,Sumith1896/sympy,atsao72/sympy,sampadsaha5/sympy,atreyv/sympy,kaichogami/sympy,Curious72/sympy,kumarkrishna/sympy,Designist/sympy,abhiii5459/sympy,mafiya69/sympy,lidavidm/sympy,meghana1995/sympy,maniteja123/sympy,amitjamadagni/sympy,Titan-C/sympy,yashsharan/sympy,debugger22/sympy,skidzo/sympy,ga7g08/sympy,ga7g08/sympy,liangjiaxing/sympy,skidzo/sympy,cswiercz/sympy,beni55/sympy,liangjiaxing/sympy,Curious72/sympy,MechCoder/sympy,cccfran/sympy,Designist/sympy,cswiercz/sympy,skidzo/sympy,kumarkrishna/sympy,Gadal/sympy,cswiercz/sympy,emon10005/sympy,ahhda/sympy,AkademieOlympia/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,drufat/sympy,rahuldan/sympy,toolforger/sympy,Davidjohnwilson/sympy,rahuldan/sympy,wanglongqi/sympy,souravsingh/sympy,AkademieOlympia/sympy,MechCoder/sympy,kaushik94/sympy,kevalds51/sympy,AunShiLord/sympy,sampadsaha5/sympy,wyom/sympy,asm666/sympy,jbbskinny/sympy,kumarkrishna/sympy,shikil/sympy,sunny94/temp,dqnykamp/sympy,lidavidm/sympy,madan96/sympy,ahhda/sympy,AunShiLord/sympy,bukzor/sympy,saurabhjn76/sympy,MridulS/sympy,vipulroxx/sympy,shipci/sympy,Vishluck/sympy,Vishluck/sympy,ChristinaZografou/sympy,jamesblunt/sympy,hrashk/sympy,jerli/sympy,abloomston/sympy,oliverlee/sympy,Sumith1896/sympy,emon10005/sympy,farhaanbukhsh/sympy,sunny94/temp,abloomston/sympy,moble/sympy,madan96/sympy,grevutiu-gabriel/sympy,AunShiLord/sympy,cccfran/sympy,oliverlee/sympy,kevalds51/sympy,iamutkarshtiwari/sympy,shipci/sympy,souravsingh/sympy,farhaanbukhsh/sympy,vipulroxx/sympy,Gadal/sympy,hrashk/sympy,maniteja123/sympy,sahmed95/sympy,kmacinnis/sympy,jbbskinny/sympy,Titan-C/sympy,cccfran/
sympy,VaibhavAgarwalVA/sympy,asm666/sympy,rahuldan/sympy,postvakje/sympy,emon10005/sympy,diofant/diofant,yukoba/sympy,aktech/sympy,sahilshekhawat/sympy,drufat/sympy,pandeyadarsh/sympy,MechCoder/sympy,Designist/sympy,MridulS/sympy,aktech/sympy,grevutiu-gabriel/sympy,garvitr/sympy,meghana1995/sympy,Gadal/sympy,moble/sympy,Curious72/sympy,jamesblunt/sympy,kmacinnis/sympy,lindsayad/sympy,postvakje/sympy,atsao72/sympy,iamutkarshtiwari/sympy,debugger22/sympy,toolforger/sympy,hargup/sympy,hargup/sympy,amitjamadagni/sympy,ga7g08/sympy,atreyv/sympy,Shaswat27/sympy,hargup/sympy,Arafatk/sympy,mcdaniel67/sympy,skirpichev/omg,pbrady/sympy,jerli/sympy,Mitchkoens/sympy,liangjiaxing/sympy,dqnykamp/sympy,sahilshekhawat/sympy,beni55/sympy,mafiya69/sympy,MridulS/sympy,Shaswat27/sympy,Arafatk/sympy,souravsingh/sympy,dqnykamp/sympy,jaimahajan1997/sympy,oliverlee/sympy,shipci/sympy,lindsayad/sympy,kaushik94/sympy,abloomston/sympy,atsao72/sympy,wanglongqi/sympy,kaushik94/sympy,toolforger/sympy,shikil/sympy,postvakje/sympy,Arafatk/sympy,VaibhavAgarwalVA/sympy,yukoba/sympy,maniteja123/sympy,beni55/sympy,yashsharan/sympy,mcdaniel67/sympy,pbrady/sympy,yashsharan/sympy,VaibhavAgarwalVA/sympy,farhaanbukhsh/sympy,wyom/sympy,jbbskinny/sympy,vipulroxx/sympy,lidavidm/sympy,iamutkarshtiwari/sympy,atreyv/sympy,garvitr/sympy,bukzor/sympy,kaichogami/sympy,AkademieOlympia/sympy,bukzor/sympy,chaffra/sympy,asm666/sympy,garvitr/sympy,grevutiu-gabriel/sympy,Titan-C/sympy,kmacinnis/sympy,aktech/sympy,madan96/sympy,abhiii5459/sympy,chaffra/sympy,drufat/sympy,saurabhjn76/sympy,pbrady/sympy,sampadsaha5/sympy,wyom/sympy,shikil/sympy,ChristinaZografou/sympy,pandeyadarsh/sympy,debugger22/sympy,kaichogami/sympy,sahmed95/sympy,kevalds51/sympy,sahilshekhawat/sympy,mafiya69/sympy
python
## Code Before: """Tests that the IPython printing module is properly loaded. """ from sympy.interactive.session import init_ipython_session from sympy.external import import_module ipython = import_module("IPython", min_module_version="0.11") # disable tests if ipython is not present if not ipython: disabled = True def test_ipythonprinting(): # Initialize and setup IPython session app = init_ipython_session() app.run_cell("from IPython.core.interactiveshell import InteractiveShell") app.run_cell("inst = InteractiveShell.instance()") app.run_cell("format = inst.display_formatter.format") app.run_cell("from sympy import Symbol") # Printing without printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == "pi" # Load printing extension app.run_cell("%load_ext sympy.interactive.ipythonprinting") # Printing with printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == u'\u03c0' ## Instruction: Make ipythonprinting test more robust ## Code After: """Tests that the IPython printing module is properly loaded. """ from sympy.interactive.session import init_ipython_session from sympy.external import import_module ipython = import_module("IPython", min_module_version="0.11") # disable tests if ipython is not present if not ipython: disabled = True def test_ipythonprinting(): # Initialize and setup IPython session app = init_ipython_session() app.run_cell("ip = get_ipython()") app.run_cell("inst = ip.instance()") app.run_cell("format = inst.display_formatter.format") app.run_cell("from sympy import Symbol") # Printing without printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == "pi" # Load printing extension app.run_cell("%load_ext sympy.interactive.ipythonprinting") # Printing with printing extension app.run_cell("a = format(Symbol('pi'))") assert app.user_ns['a']['text/plain'] == u'\u03c0'
... def test_ipythonprinting(): # Initialize and setup IPython session app = init_ipython_session() app.run_cell("ip = get_ipython()") app.run_cell("inst = ip.instance()") app.run_cell("format = inst.display_formatter.format") app.run_cell("from sympy import Symbol") ...
1e66aba5a2c82b09a6485842948aad49c654efb4
scripts/load_topics_to_mongodb.py
scripts/load_topics_to_mongodb.py
import os import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open('topics.csv', 'rb') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }})
import os import sys import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open(sys.argv[1], 'r') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }})
Fix script for loading topics into mongodb
Fix script for loading topics into mongodb
Python
mit
xenx/recommendation_system,xenx/recommendation_system
python
## Code Before: import os import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open('topics.csv', 'rb') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }}) ## Instruction: Fix script for loading topics into mongodb ## Code After: import os import sys import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open(sys.argv[1], 'r') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: continue topics[line[0]] = line[1:] print('Connecting to MongoDB') mongodb_client = MongoClient(os.environ['MONGODB_URL']) db = mongodb_client.tvrain articles = db.articles for article in topics: articles.update({'_id': article}, {'$set': { 'topics': topics[article] }})
// ... existing code ... import os import sys import csv from pymongo import MongoClient print('Parsing topics') topics = {} with open(sys.argv[1], 'r') as csvfile: reader = csv.reader(csvfile) for line in reader: if line[0] == 1: // ... rest of the code ...
9e1eae8b5d63a046b0dbfdb738419abd2a8edd69
setup.py
setup.py
from os import path from setuptools import setup, find_packages here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() install_requires = [ 'tornado', 'pyserial' ] setup_requires = [ 'pytest-runner' ] tests_require = [ 'pytest', 'coverage', 'pytest-cov' ] extras_require = { 'tests': tests_require, 'all': install_requires + tests_require } setup( name="dusty-acorn", version="2.0", description="Air Quality monitoring web application", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/niwa/dusty-acorn", packages=find_packages(), python_requires='>=3.7', install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, extras_require=extras_require, entry_points={ 'console_scripts': [ 'dusty-acorn=dusty_acorn:main' ] } )
from os import path from setuptools import setup, find_packages here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() install_requires = [ 'tornado', 'pyserial' ] setup_requires = [ 'pytest-runner' ] tests_require = [ 'pytest', 'coverage', 'pytest-cov' ] extras_require = { 'tests': tests_require, 'all': install_requires + tests_require } setup( name="dusty-acorn", version="2.0", description="Air Quality monitoring web application", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/niwa/dusty-acorn", packages=find_packages(), # find . -name "*.*" -exec sh -c 'echo "${0##*.}"' {} \; | sort | uniq package_data={ '': [ '*.css', '*.eot', '*.html', '*.jpg', '*.js', '*.json', '*.mp3', '*.mp4', '*.ods', '*.otf', '*.png', '*.svg', '*.ttf', '*.woff', '*.woff2' ], }, python_requires='>=3.7', install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, extras_require=extras_require, entry_points={ 'console_scripts': [ 'dusty-acorn=dusty_acorn:main' ] } )
Include non-py files in the final package too
Include non-py files in the final package too
Python
mit
guolivar/dusty-acorn,guolivar/dusty-acorn,guolivar/dusty-acorn,guolivar/dusty-acorn
python
## Code Before: from os import path from setuptools import setup, find_packages here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() install_requires = [ 'tornado', 'pyserial' ] setup_requires = [ 'pytest-runner' ] tests_require = [ 'pytest', 'coverage', 'pytest-cov' ] extras_require = { 'tests': tests_require, 'all': install_requires + tests_require } setup( name="dusty-acorn", version="2.0", description="Air Quality monitoring web application", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/niwa/dusty-acorn", packages=find_packages(), python_requires='>=3.7', install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, extras_require=extras_require, entry_points={ 'console_scripts': [ 'dusty-acorn=dusty_acorn:main' ] } ) ## Instruction: Include non-py files in the final package too ## Code After: from os import path from setuptools import setup, find_packages here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() install_requires = [ 'tornado', 'pyserial' ] setup_requires = [ 'pytest-runner' ] tests_require = [ 'pytest', 'coverage', 'pytest-cov' ] extras_require = { 'tests': tests_require, 'all': install_requires + tests_require } setup( name="dusty-acorn", version="2.0", description="Air Quality monitoring web application", long_description=long_description, long_description_content_type="text/markdown", url="https://github.com/niwa/dusty-acorn", packages=find_packages(), # find . 
-name "*.*" -exec sh -c 'echo "${0##*.}"' {} \; | sort | uniq package_data={ '': [ '*.css', '*.eot', '*.html', '*.jpg', '*.js', '*.json', '*.mp3', '*.mp4', '*.ods', '*.otf', '*.png', '*.svg', '*.ttf', '*.woff', '*.woff2' ], }, python_requires='>=3.7', install_requires=install_requires, setup_requires=setup_requires, tests_require=tests_require, extras_require=extras_require, entry_points={ 'console_scripts': [ 'dusty-acorn=dusty_acorn:main' ] } )
// ... existing code ... from os import path from setuptools import setup, find_packages here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() install_requires = [ 'tornado', // ... modified code ... long_description_content_type="text/markdown", url="https://github.com/niwa/dusty-acorn", packages=find_packages(), # find . -name "*.*" -exec sh -c 'echo "${0##*.}"' {} \; | sort | uniq package_data={ '': [ '*.css', '*.eot', '*.html', '*.jpg', '*.js', '*.json', '*.mp3', '*.mp4', '*.ods', '*.otf', '*.png', '*.svg', '*.ttf', '*.woff', '*.woff2' ], }, python_requires='>=3.7', install_requires=install_requires, setup_requires=setup_requires, // ... rest of the code ...
c76734ea034f2a48de0eab995c5db5667086e0c8
common/util/log.py
common/util/log.py
import sublime def universal_newlines(string): return string.replace('\r\n', '\n').replace('\r', '\n') def panel(message, run_async=True): message = universal_newlines(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_display_panel", {"msg": message}) ) else: view.run_command("gs_display_panel", {"msg": message}) def panel_append(message, run_async=True): message = universal_newlines(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_append_panel", {"msg": message}) ) else: view.run_command("gs_append_panel", {"msg": message})
import re import sublime ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') def normalize(string): return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n')) def panel(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_display_panel", {"msg": message}) ) else: view.run_command("gs_display_panel", {"msg": message}) def panel_append(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_append_panel", {"msg": message}) ) else: view.run_command("gs_append_panel", {"msg": message})
Remove ANSI escape sequences from panel output
Remove ANSI escape sequences from panel output
Python
mit
divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy
python
## Code Before: import sublime def universal_newlines(string): return string.replace('\r\n', '\n').replace('\r', '\n') def panel(message, run_async=True): message = universal_newlines(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_display_panel", {"msg": message}) ) else: view.run_command("gs_display_panel", {"msg": message}) def panel_append(message, run_async=True): message = universal_newlines(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_append_panel", {"msg": message}) ) else: view.run_command("gs_append_panel", {"msg": message}) ## Instruction: Remove ANSI escape sequences from panel output ## Code After: import re import sublime ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') def normalize(string): return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n')) def panel(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_display_panel", {"msg": message}) ) else: view.run_command("gs_display_panel", {"msg": message}) def panel_append(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( lambda: view.run_command("gs_append_panel", {"msg": message}) ) else: view.run_command("gs_append_panel", {"msg": message})
// ... existing code ... import re import sublime ANSI_ESCAPE_RE = re.compile(r'\x1B\[[0-?]*[ -/]*[@-~]') def normalize(string): return ANSI_ESCAPE_RE.sub('', string.replace('\r\n', '\n').replace('\r', '\n')) def panel(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( // ... modified code ... def panel_append(message, run_async=True): message = normalize(str(message)) view = sublime.active_window().active_view() if run_async: sublime.set_timeout_async( // ... rest of the code ...
266027514c740c30c0efae5fcd1e2932f1be9933
perfrunner/tests/ycsb2.py
perfrunner/tests/ycsb2.py
from perfrunner.helpers.cbmonitor import with_stats from perfrunner.helpers.local import clone_ycsb from perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task from perfrunner.tests import PerfTest from perfrunner.tests.n1ql import N1QLTest class YCSBTest(PerfTest): def download_ycsb(self): clone_ycsb(repo=self.test_config.ycsb_settings.repo, branch=self.test_config.ycsb_settings.branch) def load(self, *args, **kwargs): PerfTest.load(self, task=ycsb_data_load_task) self.check_num_items() @with_stats def access(self, *args, **kwargs): PerfTest.access(self, task=ycsb_task) def _report_kpi(self): self.reporter.post_to_sf( self.metric_helper.parse_ycsb_throughput() ) def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.access() self.report_kpi() class YCSBN1QLTest(YCSBTest, N1QLTest): def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.build_index() self.access() self.report_kpi()
from perfrunner.helpers.cbmonitor import with_stats from perfrunner.helpers.local import clone_ycsb from perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task from perfrunner.tests import PerfTest from perfrunner.tests.n1ql import N1QLTest class YCSBTest(PerfTest): def download_ycsb(self): clone_ycsb(repo=self.test_config.ycsb_settings.repo, branch=self.test_config.ycsb_settings.branch) def load(self, *args, **kwargs): PerfTest.load(self, task=ycsb_data_load_task) @with_stats def access(self, *args, **kwargs): PerfTest.access(self, task=ycsb_task) def _report_kpi(self): self.reporter.post_to_sf( self.metric_helper.parse_ycsb_throughput() ) def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.check_num_items() self.access() self.report_kpi() class YCSBN1QLTest(YCSBTest, N1QLTest): def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.check_num_items() self.build_index() self.access() self.report_kpi()
Check the number of items a little bit later
Check the number of items a little bit later Due to MB-22749 Change-Id: Icffe46201223efa5645644ca40b99dffe4f0fb31 Reviewed-on: http://review.couchbase.org/76413 Tested-by: Build Bot <[email protected]> Reviewed-by: Pavel Paulau <[email protected]>
Python
apache-2.0
couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner
python
## Code Before: from perfrunner.helpers.cbmonitor import with_stats from perfrunner.helpers.local import clone_ycsb from perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task from perfrunner.tests import PerfTest from perfrunner.tests.n1ql import N1QLTest class YCSBTest(PerfTest): def download_ycsb(self): clone_ycsb(repo=self.test_config.ycsb_settings.repo, branch=self.test_config.ycsb_settings.branch) def load(self, *args, **kwargs): PerfTest.load(self, task=ycsb_data_load_task) self.check_num_items() @with_stats def access(self, *args, **kwargs): PerfTest.access(self, task=ycsb_task) def _report_kpi(self): self.reporter.post_to_sf( self.metric_helper.parse_ycsb_throughput() ) def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.access() self.report_kpi() class YCSBN1QLTest(YCSBTest, N1QLTest): def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.build_index() self.access() self.report_kpi() ## Instruction: Check the number of items a little bit later Due to MB-22749 Change-Id: Icffe46201223efa5645644ca40b99dffe4f0fb31 Reviewed-on: http://review.couchbase.org/76413 Tested-by: Build Bot <[email protected]> Reviewed-by: Pavel Paulau <[email protected]> ## Code After: from perfrunner.helpers.cbmonitor import with_stats from perfrunner.helpers.local import clone_ycsb from perfrunner.helpers.worker import ycsb_data_load_task, ycsb_task from perfrunner.tests import PerfTest from perfrunner.tests.n1ql import N1QLTest class YCSBTest(PerfTest): def download_ycsb(self): clone_ycsb(repo=self.test_config.ycsb_settings.repo, branch=self.test_config.ycsb_settings.branch) def load(self, *args, **kwargs): PerfTest.load(self, task=ycsb_data_load_task) @with_stats def access(self, *args, **kwargs): PerfTest.access(self, task=ycsb_task) def _report_kpi(self): self.reporter.post_to_sf( self.metric_helper.parse_ycsb_throughput() ) def run(self): self.download_ycsb() self.load() self.wait_for_persistence() 
self.check_num_items() self.access() self.report_kpi() class YCSBN1QLTest(YCSBTest, N1QLTest): def run(self): self.download_ycsb() self.load() self.wait_for_persistence() self.check_num_items() self.build_index() self.access() self.report_kpi()
# ... existing code ... def load(self, *args, **kwargs): PerfTest.load(self, task=ycsb_data_load_task) @with_stats def access(self, *args, **kwargs): # ... modified code ... self.load() self.wait_for_persistence() self.check_num_items() self.access() ... self.load() self.wait_for_persistence() self.check_num_items() self.build_index() # ... rest of the code ...
5db82bfd52cdc4778384b1291b35d1d8791b3046
app/src/main/java/com/pr0gramm/app/util/Affiliate.kt
app/src/main/java/com/pr0gramm/app/util/Affiliate.kt
package com.pr0gramm.app.util import android.net.Uri object Affiliate { private val paramsHubTraffic = mapOf( "utm_source" to "paid", "utm_medium" to "hubtraffic", "utm_campaign" to "hubtraffic_pr0grammapp") private val reHubTraffic = "(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\.com".toRegex() private fun hubTraffic(url: String): String { val uri = Uri.parse(url) val updated = uri.buildUpon().clearQuery() for (name in uri.queryParameterNames - paramsHubTraffic.keys) { val value = uri.getQueryParameter(name) ?: continue updated.appendQueryParameter(name, value) } for ((name, value) in paramsHubTraffic) { updated.appendQueryParameter(name, value) } return updated.build().toString() } fun get(url: String): String? { return when { reHubTraffic.containsMatchIn(url) -> hubTraffic(url) // no affiliate url else -> null } } }
package com.pr0gramm.app.util import android.util.Base64 object Affiliate { private val paramsHubTraffic = mapOf( "utm_source" to "paid", "utm_medium" to "hubtraffic", "utm_campaign" to "hubtraffic_pr0grammapp") private val reHubTraffic = "(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\.com".toRegex() private fun hubTraffic(url: String): String { val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE) return "https://app.pr0gramm.com/redirect.html?u=$encoded" } fun get(url: String): String? { return when { reHubTraffic.containsMatchIn(url) -> hubTraffic(url) // no affiliate url else -> null } } }
Fix hubtraffic by redirecting over a app.pr0gramm.com domain.
Fix hubtraffic by redirecting over a app.pr0gramm.com domain.
Kotlin
mit
mopsalarm/Pr0,mopsalarm/Pr0,mopsalarm/Pr0
kotlin
## Code Before: package com.pr0gramm.app.util import android.net.Uri object Affiliate { private val paramsHubTraffic = mapOf( "utm_source" to "paid", "utm_medium" to "hubtraffic", "utm_campaign" to "hubtraffic_pr0grammapp") private val reHubTraffic = "(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\.com".toRegex() private fun hubTraffic(url: String): String { val uri = Uri.parse(url) val updated = uri.buildUpon().clearQuery() for (name in uri.queryParameterNames - paramsHubTraffic.keys) { val value = uri.getQueryParameter(name) ?: continue updated.appendQueryParameter(name, value) } for ((name, value) in paramsHubTraffic) { updated.appendQueryParameter(name, value) } return updated.build().toString() } fun get(url: String): String? { return when { reHubTraffic.containsMatchIn(url) -> hubTraffic(url) // no affiliate url else -> null } } } ## Instruction: Fix hubtraffic by redirecting over a app.pr0gramm.com domain. ## Code After: package com.pr0gramm.app.util import android.util.Base64 object Affiliate { private val paramsHubTraffic = mapOf( "utm_source" to "paid", "utm_medium" to "hubtraffic", "utm_campaign" to "hubtraffic_pr0grammapp") private val reHubTraffic = "(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\.com".toRegex() private fun hubTraffic(url: String): String { val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE) return "https://app.pr0gramm.com/redirect.html?u=$encoded" } fun get(url: String): String? { return when { reHubTraffic.containsMatchIn(url) -> hubTraffic(url) // no affiliate url else -> null } } }
// ... existing code ... package com.pr0gramm.app.util import android.util.Base64 object Affiliate { private val paramsHubTraffic = mapOf( // ... modified code ... private val reHubTraffic = "(?:pornhub|redtube|tube8|youporn|xtube|spankwire|keezmovies|extremetube)\\.com".toRegex() private fun hubTraffic(url: String): String { val encoded = Base64.encodeToString(url.toByteArray(), Base64.URL_SAFE) return "https://app.pr0gramm.com/redirect.html?u=$encoded" } fun get(url: String): String? { // ... rest of the code ...
16b07dd961cbe55ee452ed6057048ec452ffbd72
custom/icds/management/commands/copy_icds_app.py
custom/icds/management/commands/copy_icds_app.py
from __future__ import absolute_import, print_function, unicode_literals from django.core.management import BaseCommand from corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app from corehq.apps.app_manager.models import import_app class Command(BaseCommand): help = "Make a copy of a specific version of an application on the same domain" def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument('app_id') parser.add_argument('version') parser.add_argument('new_name') def handle(self, domain, app_id, version, new_name, **options): old_app = get_build_doc_by_version(domain, app_id, version) if not old_app: raise Exception("No app found with id '{}' and version '{}', on '{}'" .format(app_id, version, domain)) old_app = wrap_app(old_app) old_app.convert_build_to_app() new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name})
from __future__ import absolute_import, print_function, unicode_literals from django.core.management import BaseCommand from corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app from corehq.apps.app_manager.models import import_app class Command(BaseCommand): help = "Make a copy of a specific version of an application on the same domain" def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument('app_id') parser.add_argument('version') parser.add_argument('new_name') def handle(self, domain, app_id, version, new_name, **options): old_app = get_build_doc_by_version(domain, app_id, version) if not old_app: raise Exception("No app found with id '{}' and version '{}', on '{}'" .format(app_id, version, domain)) old_app = wrap_app(old_app) old_app.convert_build_to_app() new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name}) old_to_new = get_old_to_new_config_ids(old_app, new_app) for form in new_app.get_forms(): for old_id, new_id in old_to_new: form.source = form.source.replace(old_id, new_id) new_app.save() def get_old_to_new_config_ids(old_app, new_app): return [ (old_config.uuid, new_config.uuid) for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules()) for old_config, new_config in zip(old_module.report_configs, new_module.report_configs) ]
Replace old config IDs with the new ones
Replace old config IDs with the new ones
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
python
## Code Before: from __future__ import absolute_import, print_function, unicode_literals from django.core.management import BaseCommand from corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app from corehq.apps.app_manager.models import import_app class Command(BaseCommand): help = "Make a copy of a specific version of an application on the same domain" def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument('app_id') parser.add_argument('version') parser.add_argument('new_name') def handle(self, domain, app_id, version, new_name, **options): old_app = get_build_doc_by_version(domain, app_id, version) if not old_app: raise Exception("No app found with id '{}' and version '{}', on '{}'" .format(app_id, version, domain)) old_app = wrap_app(old_app) old_app.convert_build_to_app() new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name}) ## Instruction: Replace old config IDs with the new ones ## Code After: from __future__ import absolute_import, print_function, unicode_literals from django.core.management import BaseCommand from corehq.apps.app_manager.dbaccessors import get_build_doc_by_version, wrap_app from corehq.apps.app_manager.models import import_app class Command(BaseCommand): help = "Make a copy of a specific version of an application on the same domain" def add_arguments(self, parser): parser.add_argument('domain') parser.add_argument('app_id') parser.add_argument('version') parser.add_argument('new_name') def handle(self, domain, app_id, version, new_name, **options): old_app = get_build_doc_by_version(domain, app_id, version) if not old_app: raise Exception("No app found with id '{}' and version '{}', on '{}'" .format(app_id, version, domain)) old_app = wrap_app(old_app) old_app.convert_build_to_app() new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name}) old_to_new = get_old_to_new_config_ids(old_app, new_app) for form in new_app.get_forms(): 
for old_id, new_id in old_to_new: form.source = form.source.replace(old_id, new_id) new_app.save() def get_old_to_new_config_ids(old_app, new_app): return [ (old_config.uuid, new_config.uuid) for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules()) for old_config, new_config in zip(old_module.report_configs, new_module.report_configs) ]
# ... existing code ... old_app = wrap_app(old_app) old_app.convert_build_to_app() new_app = import_app(old_app.to_json(), domain, source_properties={'name': new_name}) old_to_new = get_old_to_new_config_ids(old_app, new_app) for form in new_app.get_forms(): for old_id, new_id in old_to_new: form.source = form.source.replace(old_id, new_id) new_app.save() def get_old_to_new_config_ids(old_app, new_app): return [ (old_config.uuid, new_config.uuid) for old_module, new_module in zip(old_app.get_report_modules(), new_app.get_report_modules()) for old_config, new_config in zip(old_module.report_configs, new_module.report_configs) ] # ... rest of the code ...
9808e97747785c27387ad1ce9ffc3e9a05c80f08
enigma.py
enigma.py
import string class Steckerbrett: def __init__(self): pass class Walzen: def __init__(self): pass class Enigma: def __init__(self): pass def cipher(self, message): pass
import string class Steckerbrett: def __init__(self): pass class Umkehrwalze: def __init__(self, wiring): self.wiring = wiring def encode(self, letter): return self.wiring[string.ascii_uppercase.index(letter)] class Walzen: def __init__(self): pass class Enigma: def __init__(self): pass def cipher(self, message): pass
Create class for the reflectors
Create class for the reflectors
Python
mit
ranisalt/enigma
python
## Code Before: import string class Steckerbrett: def __init__(self): pass class Walzen: def __init__(self): pass class Enigma: def __init__(self): pass def cipher(self, message): pass ## Instruction: Create class for the reflectors ## Code After: import string class Steckerbrett: def __init__(self): pass class Umkehrwalze: def __init__(self, wiring): self.wiring = wiring def encode(self, letter): return self.wiring[string.ascii_uppercase.index(letter)] class Walzen: def __init__(self): pass class Enigma: def __init__(self): pass def cipher(self, message): pass
// ... existing code ... class Steckerbrett: def __init__(self): pass class Umkehrwalze: def __init__(self, wiring): self.wiring = wiring def encode(self, letter): return self.wiring[string.ascii_uppercase.index(letter)] class Walzen: // ... rest of the code ...
545f688f0dd59df009e2392cbf27ef06865a4b89
src/azure/cli/__main__.py
src/azure/cli/__main__.py
import sys import azure.cli.main from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush try: try: if user_agrees_to_telemetry(): init_telemetry() except Exception: #pylint: disable=broad-except pass sys.exit(azure.cli.main.main(sys.argv[1:])) finally: telemetry_flush()
import sys import os import azure.cli.main from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush try: try: if user_agrees_to_telemetry(): init_telemetry() except Exception: #pylint: disable=broad-except pass args = sys.argv[1:] # Check if we are in argcomplete mode - if so, we # need to pick up our args from environment variables if os.environ.get('_ARGCOMPLETE'): comp_line = os.environ.get('COMP_LINE') if comp_line: args = comp_line.split()[1:] sys.exit(azure.cli.main.main(args)) finally: telemetry_flush()
Speed up argument completions by not loading all command packages unless we have to...
Speed up argument completions by not loading all command packages unless we have to...
Python
mit
yugangw-msft/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli
python
## Code Before: import sys import azure.cli.main from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush try: try: if user_agrees_to_telemetry(): init_telemetry() except Exception: #pylint: disable=broad-except pass sys.exit(azure.cli.main.main(sys.argv[1:])) finally: telemetry_flush() ## Instruction: Speed up argument completions by not loading all command packages unless we have to... ## Code After: import sys import os import azure.cli.main from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush try: try: if user_agrees_to_telemetry(): init_telemetry() except Exception: #pylint: disable=broad-except pass args = sys.argv[1:] # Check if we are in argcomplete mode - if so, we # need to pick up our args from environment variables if os.environ.get('_ARGCOMPLETE'): comp_line = os.environ.get('COMP_LINE') if comp_line: args = comp_line.split()[1:] sys.exit(azure.cli.main.main(args)) finally: telemetry_flush()
# ... existing code ... import sys import os import azure.cli.main # ... modified code ... except Exception: #pylint: disable=broad-except pass args = sys.argv[1:] # Check if we are in argcomplete mode - if so, we # need to pick up our args from environment variables if os.environ.get('_ARGCOMPLETE'): comp_line = os.environ.get('COMP_LINE') if comp_line: args = comp_line.split()[1:] sys.exit(azure.cli.main.main(args)) finally: telemetry_flush() # ... rest of the code ...
ae5b93c4e12f732a8c56de80b39f227c90ef4809
polls/models.py
polls/models.py
from django.db import models class Question(models.Model): question_text = models.CharField(max_length=140) published_at = models.DateTimeField(auto_now_add=True) class Meta: get_latest_by = 'published_at' def __str__(self): return self.question_text class Choice(models.Model): question = models.ForeignKey(Question, related_name='choices') choice_text = models.CharField(max_length=140) def __str__(self): return self.choice_text def vote(self): """ Create a vote on this choice. """ return Vote.objects.create(choice=self) class Vote(models.Model): choice = models.ForeignKey(Choice, related_name='votes')
from django.db import models class Question(models.Model): question_text = models.CharField(max_length=140) published_at = models.DateTimeField(auto_now_add=True) class Meta: get_latest_by = 'published_at' ordering = ('-published_at',) def __str__(self): return self.question_text class Choice(models.Model): question = models.ForeignKey(Question, related_name='choices') choice_text = models.CharField(max_length=140) def __str__(self): return self.choice_text def vote(self): """ Create a vote on this choice. """ return Vote.objects.create(choice=self) class Vote(models.Model): choice = models.ForeignKey(Choice, related_name='votes')
Order questions by published date
Order questions by published date Closes #23
Python
mit
apiaryio/polls-api
python
## Code Before: from django.db import models class Question(models.Model): question_text = models.CharField(max_length=140) published_at = models.DateTimeField(auto_now_add=True) class Meta: get_latest_by = 'published_at' def __str__(self): return self.question_text class Choice(models.Model): question = models.ForeignKey(Question, related_name='choices') choice_text = models.CharField(max_length=140) def __str__(self): return self.choice_text def vote(self): """ Create a vote on this choice. """ return Vote.objects.create(choice=self) class Vote(models.Model): choice = models.ForeignKey(Choice, related_name='votes') ## Instruction: Order questions by published date Closes #23 ## Code After: from django.db import models class Question(models.Model): question_text = models.CharField(max_length=140) published_at = models.DateTimeField(auto_now_add=True) class Meta: get_latest_by = 'published_at' ordering = ('-published_at',) def __str__(self): return self.question_text class Choice(models.Model): question = models.ForeignKey(Question, related_name='choices') choice_text = models.CharField(max_length=140) def __str__(self): return self.choice_text def vote(self): """ Create a vote on this choice. """ return Vote.objects.create(choice=self) class Vote(models.Model): choice = models.ForeignKey(Choice, related_name='votes')
// ... existing code ... class Meta: get_latest_by = 'published_at' ordering = ('-published_at',) def __str__(self): return self.question_text // ... rest of the code ...
0b53adc34259fedc23e42e7576517fb62f4cb33e
base_contact/models/ir_model.py
base_contact/models/ir_model.py
import logging from openerp import api, models _logger = logging.getLogger(__name__) class IrModel(models.Model): _inherit = "ir.model" @api.cr def _register_hook(self, cr): """Only warn in installed instances.""" _logger.warning("This module is DEPRECATED. See README.") return super(IrModel, self)._register_hook(cr)
import logging from openerp import api, models _logger = logging.getLogger(__name__) class IrModel(models.Model): _inherit = "ir.model" @api.cr def _register_hook(self, cr): """Only warn in installed instances.""" _logger.info("WARNING: This module is DEPRECATED. See README.") return super(IrModel, self)._register_hook(cr)
Downgrade to INFO, since runbots install this.
Downgrade to INFO, since runbots install this.
Python
agpl-3.0
open-synergy/partner-contact,acsone/partner-contact,diagramsoftware/partner-contact
python
## Code Before: import logging from openerp import api, models _logger = logging.getLogger(__name__) class IrModel(models.Model): _inherit = "ir.model" @api.cr def _register_hook(self, cr): """Only warn in installed instances.""" _logger.warning("This module is DEPRECATED. See README.") return super(IrModel, self)._register_hook(cr) ## Instruction: Downgrade to INFO, since runbots install this. ## Code After: import logging from openerp import api, models _logger = logging.getLogger(__name__) class IrModel(models.Model): _inherit = "ir.model" @api.cr def _register_hook(self, cr): """Only warn in installed instances.""" _logger.info("WARNING: This module is DEPRECATED. See README.") return super(IrModel, self)._register_hook(cr)
// ... existing code ... @api.cr def _register_hook(self, cr): """Only warn in installed instances.""" _logger.info("WARNING: This module is DEPRECATED. See README.") return super(IrModel, self)._register_hook(cr) // ... rest of the code ...
cbc69077016885ebf2b481eebd2f11511c8184ce
nbgrader/tests/apps/test_nbgrader.py
nbgrader/tests/apps/test_nbgrader.py
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=0) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 def test_logfile(self): # by default, there should be no logfile created files_before = set(os.listdir()) run_nbgrader([]) files_after = set(os.listdir()) assert files_before == files_after # if we specify a logfile, it should get used run_nbgrader(["--NbGrader.logfile=log.txt"]) assert os.path.exists("log.txt")
import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=0) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 def test_logfile(self): # by default, there should be no logfile created cwd = os.getcwd() files_before = set(os.listdir(cwd)) run_nbgrader([]) files_after = set(os.listdir(cwd)) assert files_before == files_after # if we specify a logfile, it should get used run_nbgrader(["--NbGrader.logfile=log.txt"]) assert os.path.exists("log.txt")
Include directory name for python 2 compatibility
Include directory name for python 2 compatibility
Python
bsd-3-clause
jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
python
## Code Before: import os import sys from .. import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=0) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 def test_logfile(self): # by default, there should be no logfile created files_before = set(os.listdir()) run_nbgrader([]) files_after = set(os.listdir()) assert files_before == files_after # if we specify a logfile, it should get used run_nbgrader(["--NbGrader.logfile=log.txt"]) assert os.path.exists("log.txt") ## Instruction: Include directory name for python 2 compatibility ## Code After: import os import sys from .. 
import run_nbgrader, run_command from .base import BaseTestApp class TestNbGrader(BaseTestApp): def test_help(self): """Does the help display without error?""" run_nbgrader(["--help-all"]) def test_no_subapp(self): """Is the help displayed when no subapp is given?""" run_nbgrader([], retcode=0) def test_check_version(self, capfd): """Is the version the same regardless of how we run nbgrader?""" out1 = '\n'.join( run_command([sys.executable, "-m", "nbgrader", "--version"]).splitlines()[-3:] ).strip() out2 = '\n'.join( run_nbgrader(["--version"], stdout=True).splitlines()[-3:] ).strip() assert out1 == out2 def test_logfile(self): # by default, there should be no logfile created cwd = os.getcwd() files_before = set(os.listdir(cwd)) run_nbgrader([]) files_after = set(os.listdir(cwd)) assert files_before == files_after # if we specify a logfile, it should get used run_nbgrader(["--NbGrader.logfile=log.txt"]) assert os.path.exists("log.txt")
# ... existing code ... def test_logfile(self): # by default, there should be no logfile created cwd = os.getcwd() files_before = set(os.listdir(cwd)) run_nbgrader([]) files_after = set(os.listdir(cwd)) assert files_before == files_after # if we specify a logfile, it should get used # ... rest of the code ...
0d572d60522ae0e80105330981a66bc541434b99
rip/filter_operators.py
rip/filter_operators.py
EQUALS = 'equals' GT = 'gt' LT = 'lt' OPERATOR_SEPARATOR = '__' REVERSE_ORDER = '-' ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1} def split_to_field_and_filter_type(filter_name): filter_split = filter_name.split(OPERATOR_SEPARATOR) filter_type = filter_split[-1] if len(filter_split) > 0 else None if filter_type in ALL_OPERATORS: return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type else: return filter_name, None def split_to_field_and_order_type(field_name_with_operator): if field_name_with_operator.startswith(REVERSE_ORDER): return field_name_with_operator[1:], REVERSE_ORDER else: return field_name_with_operator, None def transform_to_list(val): if isinstance(val, (list, tuple)): return val else: return [val]
EQUALS = 'equals' GT = 'gt' LT = 'lt' IN = 'in' OPERATOR_SEPARATOR = '__' REVERSE_ORDER = '-' ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1} def split_to_field_and_filter_type(filter_name): filter_split = filter_name.split(OPERATOR_SEPARATOR) filter_type = filter_split[-1] if len(filter_split) > 0 else None if filter_type in ALL_OPERATORS: return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type else: return filter_name, None def split_to_field_and_order_type(field_name_with_operator): if field_name_with_operator.startswith(REVERSE_ORDER): return field_name_with_operator[1:], REVERSE_ORDER else: return field_name_with_operator, None def transform_to_list(val): if isinstance(val, (list, tuple)): return val else: return [val]
Support __in as operator for backwards comp
Support __in as operator for backwards comp
Python
mit
Aplopio/rip,Aplopio/django_rip
python
## Code Before: EQUALS = 'equals' GT = 'gt' LT = 'lt' OPERATOR_SEPARATOR = '__' REVERSE_ORDER = '-' ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1} def split_to_field_and_filter_type(filter_name): filter_split = filter_name.split(OPERATOR_SEPARATOR) filter_type = filter_split[-1] if len(filter_split) > 0 else None if filter_type in ALL_OPERATORS: return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type else: return filter_name, None def split_to_field_and_order_type(field_name_with_operator): if field_name_with_operator.startswith(REVERSE_ORDER): return field_name_with_operator[1:], REVERSE_ORDER else: return field_name_with_operator, None def transform_to_list(val): if isinstance(val, (list, tuple)): return val else: return [val] ## Instruction: Support __in as operator for backwards comp ## Code After: EQUALS = 'equals' GT = 'gt' LT = 'lt' IN = 'in' OPERATOR_SEPARATOR = '__' REVERSE_ORDER = '-' ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1} def split_to_field_and_filter_type(filter_name): filter_split = filter_name.split(OPERATOR_SEPARATOR) filter_type = filter_split[-1] if len(filter_split) > 0 else None if filter_type in ALL_OPERATORS: return OPERATOR_SEPARATOR.join(filter_split[:-1]), filter_type else: return filter_name, None def split_to_field_and_order_type(field_name_with_operator): if field_name_with_operator.startswith(REVERSE_ORDER): return field_name_with_operator[1:], REVERSE_ORDER else: return field_name_with_operator, None def transform_to_list(val): if isinstance(val, (list, tuple)): return val else: return [val]
... EQUALS = 'equals' GT = 'gt' LT = 'lt' IN = 'in' OPERATOR_SEPARATOR = '__' REVERSE_ORDER = '-' ALL_OPERATORS = {EQUALS: 1, GT: 1, LT: 1, IN: 1} def split_to_field_and_filter_type(filter_name): ...
179930370aabb17284a5de081eb6db42ab3d2c54
android/app/src/main/java/com/funnyhatsoftware/spacedock/activity/DetailsActivity.java
android/app/src/main/java/com/funnyhatsoftware/spacedock/activity/DetailsActivity.java
package com.funnyhatsoftware.spacedock.activity; import android.content.Context; import android.content.Intent; import android.support.v4.app.Fragment; import com.funnyhatsoftware.spacedock.fragment.DetailsFragment; public class DetailsActivity extends SinglePaneActivity { private static final String EXTRA_TYPE = "browsetype"; private static final String EXTRA_ITEM = "displayitem"; public static Intent getIntent(Context context, String itemType, String itemId) { if (itemType == null || itemId == null) { throw new IllegalArgumentException(); } Intent intent = new Intent(context, DetailsActivity.class); intent.putExtra(EXTRA_TYPE, itemType); intent.putExtra(EXTRA_ITEM, itemId); return intent; } public Fragment getFragment() { String itemType = getIntent().getStringExtra(EXTRA_TYPE); String itemId = getIntent().getStringExtra(EXTRA_ITEM); return DetailsFragment.newInstance(itemType, itemId); } }
package com.funnyhatsoftware.spacedock.activity; import android.content.Context; import android.content.Intent; import android.support.v4.app.Fragment; import com.funnyhatsoftware.spacedock.fragment.DetailsFragment; import com.funnyhatsoftware.spacedock.holder.ExpansionHolder; public class DetailsActivity extends SinglePaneActivity { private static final String EXTRA_TYPE = "browsetype"; private static final String EXTRA_ITEM = "displayitem"; public static Intent getIntent(Context context, String itemType, String itemId) { if (itemType == null || itemId == null) { throw new IllegalArgumentException(); } if (itemType.equals(ExpansionHolder.TYPE_STRING)) { return ExpansionDetailsActivity.getIntent(context, itemId); } Intent intent = new Intent(context, DetailsActivity.class); intent.putExtra(EXTRA_TYPE, itemType); intent.putExtra(EXTRA_ITEM, itemId); return intent; } public Fragment getFragment() { String itemType = getIntent().getStringExtra(EXTRA_TYPE); String itemId = getIntent().getStringExtra(EXTRA_ITEM); return DetailsFragment.newInstance(itemType, itemId); } }
Fix expansion detail display on phones
Fix expansion detail display on phones
Java
apache-2.0
tblackwe/spacedock,tblackwe/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,spacedockapp/spacedock,tblackwe/spacedock,spacedockapp/spacedock,tblackwe/spacedock,spacedockapp/spacedock,tblackwe/spacedock,tblackwe/spacedock
java
## Code Before: package com.funnyhatsoftware.spacedock.activity; import android.content.Context; import android.content.Intent; import android.support.v4.app.Fragment; import com.funnyhatsoftware.spacedock.fragment.DetailsFragment; public class DetailsActivity extends SinglePaneActivity { private static final String EXTRA_TYPE = "browsetype"; private static final String EXTRA_ITEM = "displayitem"; public static Intent getIntent(Context context, String itemType, String itemId) { if (itemType == null || itemId == null) { throw new IllegalArgumentException(); } Intent intent = new Intent(context, DetailsActivity.class); intent.putExtra(EXTRA_TYPE, itemType); intent.putExtra(EXTRA_ITEM, itemId); return intent; } public Fragment getFragment() { String itemType = getIntent().getStringExtra(EXTRA_TYPE); String itemId = getIntent().getStringExtra(EXTRA_ITEM); return DetailsFragment.newInstance(itemType, itemId); } } ## Instruction: Fix expansion detail display on phones ## Code After: package com.funnyhatsoftware.spacedock.activity; import android.content.Context; import android.content.Intent; import android.support.v4.app.Fragment; import com.funnyhatsoftware.spacedock.fragment.DetailsFragment; import com.funnyhatsoftware.spacedock.holder.ExpansionHolder; public class DetailsActivity extends SinglePaneActivity { private static final String EXTRA_TYPE = "browsetype"; private static final String EXTRA_ITEM = "displayitem"; public static Intent getIntent(Context context, String itemType, String itemId) { if (itemType == null || itemId == null) { throw new IllegalArgumentException(); } if (itemType.equals(ExpansionHolder.TYPE_STRING)) { return ExpansionDetailsActivity.getIntent(context, itemId); } Intent intent = new Intent(context, DetailsActivity.class); intent.putExtra(EXTRA_TYPE, itemType); intent.putExtra(EXTRA_ITEM, itemId); return intent; } public Fragment getFragment() { String itemType = getIntent().getStringExtra(EXTRA_TYPE); String itemId = 
getIntent().getStringExtra(EXTRA_ITEM); return DetailsFragment.newInstance(itemType, itemId); } }
... import android.support.v4.app.Fragment; import com.funnyhatsoftware.spacedock.fragment.DetailsFragment; import com.funnyhatsoftware.spacedock.holder.ExpansionHolder; public class DetailsActivity extends SinglePaneActivity { private static final String EXTRA_TYPE = "browsetype"; ... public static Intent getIntent(Context context, String itemType, String itemId) { if (itemType == null || itemId == null) { throw new IllegalArgumentException(); } if (itemType.equals(ExpansionHolder.TYPE_STRING)) { return ExpansionDetailsActivity.getIntent(context, itemId); } Intent intent = new Intent(context, DetailsActivity.class); ...
e893a860f4a8ad9682f400507948ee20fce1c328
healthcheck/contrib/django/status_endpoint/views.py
healthcheck/contrib/django/status_endpoint/views.py
import json from django.conf import settings from django.views.decorators.http import require_http_methods from django.http import HttpResponse, HttpResponseServerError from healthcheck.healthcheck import ( DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker) @require_http_methods(['GET']) def status(request): checks = [] if getattr(settings, 'STATUS_CHECK_DBS', True): checks.append(DjangoDBsHealthCheck()) files_to_check = getattr( settings, 'STATUS_CHECK_FILES') if files_to_check: checks.append( FilesDontExistHealthCheck( files_to_check, check_id="quiesce file doesn't exist")) ok, details = HealthChecker(checks)() if not ok: return HttpResponseServerError((json.dumps(details))) return HttpResponse(json.dumps(details))
import json from django.conf import settings from django.views.decorators.http import require_http_methods from django.http import HttpResponse from healthcheck.healthcheck import ( DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker) class JsonResponse(HttpResponse): def __init__(self, data, **kwargs): kwargs.setdefault('content_type', 'application/json') data = json.dumps(data) super(JsonResponse, self).__init__(content=data, **kwargs) class JsonResponseServerError(JsonResponse): status_code = 500 @require_http_methods(['GET']) def status(request): checks = [] if getattr(settings, 'STATUS_CHECK_DBS', True): checks.append(DjangoDBsHealthCheck()) files_to_check = getattr(settings, 'STATUS_CHECK_FILES') if files_to_check: checks.append(FilesDontExistHealthCheck( files_to_check, check_id="quiesce file doesn't exist")) ok, details = HealthChecker(checks)() if not ok: return JsonResponseServerError(json.dumps(details)) return JsonResponse(details)
Fix content_type for JSON responses
Fix content_type for JSON responses
Python
mit
yola/healthcheck
python
## Code Before: import json from django.conf import settings from django.views.decorators.http import require_http_methods from django.http import HttpResponse, HttpResponseServerError from healthcheck.healthcheck import ( DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker) @require_http_methods(['GET']) def status(request): checks = [] if getattr(settings, 'STATUS_CHECK_DBS', True): checks.append(DjangoDBsHealthCheck()) files_to_check = getattr( settings, 'STATUS_CHECK_FILES') if files_to_check: checks.append( FilesDontExistHealthCheck( files_to_check, check_id="quiesce file doesn't exist")) ok, details = HealthChecker(checks)() if not ok: return HttpResponseServerError((json.dumps(details))) return HttpResponse(json.dumps(details)) ## Instruction: Fix content_type for JSON responses ## Code After: import json from django.conf import settings from django.views.decorators.http import require_http_methods from django.http import HttpResponse from healthcheck.healthcheck import ( DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker) class JsonResponse(HttpResponse): def __init__(self, data, **kwargs): kwargs.setdefault('content_type', 'application/json') data = json.dumps(data) super(JsonResponse, self).__init__(content=data, **kwargs) class JsonResponseServerError(JsonResponse): status_code = 500 @require_http_methods(['GET']) def status(request): checks = [] if getattr(settings, 'STATUS_CHECK_DBS', True): checks.append(DjangoDBsHealthCheck()) files_to_check = getattr(settings, 'STATUS_CHECK_FILES') if files_to_check: checks.append(FilesDontExistHealthCheck( files_to_check, check_id="quiesce file doesn't exist")) ok, details = HealthChecker(checks)() if not ok: return JsonResponseServerError(json.dumps(details)) return JsonResponse(details)
... from django.conf import settings from django.views.decorators.http import require_http_methods from django.http import HttpResponse from healthcheck.healthcheck import ( DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker) class JsonResponse(HttpResponse): def __init__(self, data, **kwargs): kwargs.setdefault('content_type', 'application/json') data = json.dumps(data) super(JsonResponse, self).__init__(content=data, **kwargs) class JsonResponseServerError(JsonResponse): status_code = 500 @require_http_methods(['GET']) ... if getattr(settings, 'STATUS_CHECK_DBS', True): checks.append(DjangoDBsHealthCheck()) files_to_check = getattr(settings, 'STATUS_CHECK_FILES') if files_to_check: checks.append(FilesDontExistHealthCheck( files_to_check, check_id="quiesce file doesn't exist")) ok, details = HealthChecker(checks)() if not ok: return JsonResponseServerError(json.dumps(details)) return JsonResponse(details) ...
c5946e378147f6d4d42c7a3e531388e6203f29e4
fantasyStocks/static/stockCleaner.py
fantasyStocks/static/stockCleaner.py
import json with open("stocks.json") as f:
from pprint import pprint import json import re REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: l = json.loads(f.read()) out = [] for i in l: if not "^" in i["symbol"]: out.append(i) with open("newStocks.json", "w") as w: w.write(json.dumps(out))
Write script to remove duplicates from stocks.json
Write script to remove duplicates from stocks.json
Python
apache-2.0
ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks,ddsnowboard/FantasyStocks
python
## Code Before: import json with open("stocks.json") as f: ## Instruction: Write script to remove duplicates from stocks.json ## Code After: from pprint import pprint import json import re REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: l = json.loads(f.read()) out = [] for i in l: if not "^" in i["symbol"]: out.append(i) with open("newStocks.json", "w") as w: w.write(json.dumps(out))
... from pprint import pprint import json import re REGEXP = re.compile("(?P<symbol>[A-Z]{1,4}).*") with open("stocks.json") as f: l = json.loads(f.read()) out = [] for i in l: if not "^" in i["symbol"]: out.append(i) with open("newStocks.json", "w") as w: w.write(json.dumps(out)) ...
98925a82dfb45a4c76496cd11af8d1483a678e6e
sigh/views/api.py
sigh/views/api.py
import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags
import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag from ..models import User api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags @api_views.route('user/autocompletion/<q>') @jsonify def autocomplete_user(q): users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all() users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users] return users
Create a new API for User autocompletion
Create a new API for User autocompletion
Python
mit
kxxoling/Programmer-Sign,kxxoling/Programmer-Sign,kxxoling/Programmer-Sign
python
## Code Before: import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags ## Instruction: Create a new API for User autocompletion ## Code After: import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag from ..models import User api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags @api_views.route('user/autocompletion/<q>') @jsonify def autocomplete_user(q): users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all() users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users] return users
# ... existing code ... from flask import Response from ..models import Tag from ..models import User api_views = Blueprint('api', __name__, url_prefix='/api/') # ... modified code ... tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags @api_views.route('user/autocompletion/<q>') @jsonify def autocomplete_user(q): users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all() users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users] return users # ... rest of the code ...
d05c44ffd8bf5b61f9ab160323170cca42240346
samples/copy/build.gradle.kts
samples/copy/build.gradle.kts
import org.gradle.api.file.* import org.gradle.api.tasks.* import org.apache.tools.ant.filters.* //for including in the copy task val dataContent = copySpec { it.from("src/data") it.include("*.data") } task<Copy>("initConfig") { from("src/main/config").let { it as CopySpec }.apply { include("**/*.properties") include("**/*.xml") filter<ReplaceTokens>( "tokens" to mapOf("version" to "2.3.1")) } from("src/main/languages").let { it as CopySpec }.apply { rename("EN_US_(.*)", "$1") } into("build/target/config") exclude("**/*.bak") includeEmptyDirs = false with(dataContent) } task<Delete>("clean") { delete(buildDir) }
import org.gradle.api.tasks.* import org.apache.tools.ant.filters.* //for including in the copy task val dataContent = copySpec { it.from("src/data") it.include("*.data") } task<Copy>("initConfig") { from("src/main/config") { it.include("**/*.properties") it.include("**/*.xml") it.filter<ReplaceTokens>( "tokens" to mapOf("version" to "2.3.1")) } from("src/main/languages") { it.rename("EN_US_(.*)", "$1") } into("build/target/config") exclude("**/*.bak") includeEmptyDirs = false with(dataContent) } task<Delete>("clean") { delete(buildDir) }
Revert "Fix copy sample to work against latest Gradle API"
Revert "Fix copy sample to work against latest Gradle API" This reverts commit d49facf0025572e3a66987c48af4ed8914aeeb88 since the motivating changes in the Gradle API were also reverted.
Kotlin
apache-2.0
gradle/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,blindpirate/gradle,blindpirate/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gradle/gradle,blindpirate/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle,blindpirate/gradle,gradle/gradle-script-kotlin,robinverduijn/gradle,gradle/gradle
kotlin
## Code Before: import org.gradle.api.file.* import org.gradle.api.tasks.* import org.apache.tools.ant.filters.* //for including in the copy task val dataContent = copySpec { it.from("src/data") it.include("*.data") } task<Copy>("initConfig") { from("src/main/config").let { it as CopySpec }.apply { include("**/*.properties") include("**/*.xml") filter<ReplaceTokens>( "tokens" to mapOf("version" to "2.3.1")) } from("src/main/languages").let { it as CopySpec }.apply { rename("EN_US_(.*)", "$1") } into("build/target/config") exclude("**/*.bak") includeEmptyDirs = false with(dataContent) } task<Delete>("clean") { delete(buildDir) } ## Instruction: Revert "Fix copy sample to work against latest Gradle API" This reverts commit d49facf0025572e3a66987c48af4ed8914aeeb88 since the motivating changes in the Gradle API were also reverted. ## Code After: import org.gradle.api.tasks.* import org.apache.tools.ant.filters.* //for including in the copy task val dataContent = copySpec { it.from("src/data") it.include("*.data") } task<Copy>("initConfig") { from("src/main/config") { it.include("**/*.properties") it.include("**/*.xml") it.filter<ReplaceTokens>( "tokens" to mapOf("version" to "2.3.1")) } from("src/main/languages") { it.rename("EN_US_(.*)", "$1") } into("build/target/config") exclude("**/*.bak") includeEmptyDirs = false with(dataContent) } task<Delete>("clean") { delete(buildDir) }
# ... existing code ... import org.gradle.api.tasks.* import org.apache.tools.ant.filters.* # ... modified code ... task<Copy>("initConfig") { from("src/main/config") { it.include("**/*.properties") it.include("**/*.xml") it.filter<ReplaceTokens>( "tokens" to mapOf("version" to "2.3.1")) } from("src/main/languages") { it.rename("EN_US_(.*)", "$1") } into("build/target/config") # ... rest of the code ...
a2c92c0be31e1d7a31625878e7bc68e23930224c
loop.py
loop.py
import speech import sys inputs = ["hi", "foo", "lemon", "hello world"] output = [] current_run = [] def callback(phrase, listener): speech.say(phrase) if phrase == "turn off": speech.say("Goodbye.") listener.stoplistening() sys.exit() print "Anything you type, speech will say back." print "Anything you say, speech will print out." print "Say or type 'turn off' to quit." print listener = speech.listenforanything(callback) while listener.islistening(): for i in range(0, len(inputs)): speech.say(str(inputs[i])) text = raw_input("> ") if text == "turn off": listener.stoplistening() sys.exit() else: speech.say(text)
import speech import sys import time lemon = "lemon" output = [] current_run = [] waiting = False hasDetect = False print "Say something." def callback(phrase, listener): speech.say(phrase) print phrase lemon = str(phrase) print lemon hasDetect = True waiting = False listener = speech.listenforanything(callback) while listener.islistening(): if not waiting and not hasDetect: waiting = True speech.say(lemon)
Change lemon if speech is detected
Change lemon if speech is detected If speech is detected, change lemon to whatever was detected. Also print it.
Python
mit
powderblock/SpeechLooper
python
## Code Before: import speech import sys inputs = ["hi", "foo", "lemon", "hello world"] output = [] current_run = [] def callback(phrase, listener): speech.say(phrase) if phrase == "turn off": speech.say("Goodbye.") listener.stoplistening() sys.exit() print "Anything you type, speech will say back." print "Anything you say, speech will print out." print "Say or type 'turn off' to quit." print listener = speech.listenforanything(callback) while listener.islistening(): for i in range(0, len(inputs)): speech.say(str(inputs[i])) text = raw_input("> ") if text == "turn off": listener.stoplistening() sys.exit() else: speech.say(text) ## Instruction: Change lemon if speech is detected If speech is detected, change lemon to whatever was detected. Also print it. ## Code After: import speech import sys import time lemon = "lemon" output = [] current_run = [] waiting = False hasDetect = False print "Say something." def callback(phrase, listener): speech.say(phrase) print phrase lemon = str(phrase) print lemon hasDetect = True waiting = False listener = speech.listenforanything(callback) while listener.islistening(): if not waiting and not hasDetect: waiting = True speech.say(lemon)
# ... existing code ... import speech import sys import time lemon = "lemon" output = [] current_run = [] waiting = False hasDetect = False print "Say something." def callback(phrase, listener): speech.say(phrase) print phrase lemon = str(phrase) print lemon hasDetect = True waiting = False listener = speech.listenforanything(callback) while listener.islistening(): if not waiting and not hasDetect: waiting = True speech.say(lemon) # ... rest of the code ...
b8ccf5a8022a002571a40900e6f725a0c8b0d860
src/udon2xml.c
src/udon2xml.c
int main (int argc, char *argv[]) { int i; int found = 0; pstate *state = init_from_file("../sjson-examples/big.txt"); for(i=0; i<10000; i++) { found += parse(state); reset_state(state); } free_state(state); printf("%d\n", found); }
int main (int argc, char *argv[]) { int i; int found = 0; if(argc < 2) return 1; pstate *state = init_from_file(argv[1]); for(i=0; i<10000; i++) { found += parse(state); reset_state(state); } free_state(state); printf("%d\n", found); }
Remove hardcoded file path. Still awkward and not generally usable, but _more_ general.
Remove hardcoded file path. Still awkward and not generally usable, but _more_ general.
C
mit
josephwecker/udon-c,josephwecker/udon-c,josephwecker/udon-c,josephwecker/udon-c
c
## Code Before: int main (int argc, char *argv[]) { int i; int found = 0; pstate *state = init_from_file("../sjson-examples/big.txt"); for(i=0; i<10000; i++) { found += parse(state); reset_state(state); } free_state(state); printf("%d\n", found); } ## Instruction: Remove hardcoded file path. Still awkward and not generally usable, but _more_ general. ## Code After: int main (int argc, char *argv[]) { int i; int found = 0; if(argc < 2) return 1; pstate *state = init_from_file(argv[1]); for(i=0; i<10000; i++) { found += parse(state); reset_state(state); } free_state(state); printf("%d\n", found); }
# ... existing code ... int main (int argc, char *argv[]) { int i; int found = 0; if(argc < 2) return 1; pstate *state = init_from_file(argv[1]); for(i=0; i<10000; i++) { found += parse(state); reset_state(state); # ... rest of the code ...
c5d2682b8fa25e5234b8452c861e609225e34c6f
src/test/java/guitests/RedoCommandTest.java
src/test/java/guitests/RedoCommandTest.java
package guitests; public class RedoCommandTest extends TaskManagerGuiTest { }
package guitests; import org.junit.Test; import seedu.address.logic.commands.UndoCommand; import seedu.address.testutil.TestTask; import seedu.address.testutil.TestUtil; public class RedoCommandTest extends TaskManagerGuiTest { TestTask[] expectedList = td.getTypicalTasks(); TestTask[] currentList = expectedList; public void redo() { redoAdd(); } /** * Tries to redo an add command */ @Test public void redoAdd() { TestTask taskToAdd = td.alice; commandBox.runCommand(taskToAdd.getAddCommand()); commandBox.runCommand(UndoCommand.COMMAND_WORD); expectedList = TestUtil.addTasksToList(expectedList, taskToAdd); assertRedoSuccess(currentList, expectedList); } }
Test case for redo add command
Test case for redo add command
Java
mit
CS2103JAN2017-W13-B2/main,CS2103JAN2017-W13-B2/main
java
## Code Before: package guitests; public class RedoCommandTest extends TaskManagerGuiTest { } ## Instruction: Test case for redo add command ## Code After: package guitests; import org.junit.Test; import seedu.address.logic.commands.UndoCommand; import seedu.address.testutil.TestTask; import seedu.address.testutil.TestUtil; public class RedoCommandTest extends TaskManagerGuiTest { TestTask[] expectedList = td.getTypicalTasks(); TestTask[] currentList = expectedList; public void redo() { redoAdd(); } /** * Tries to redo an add command */ @Test public void redoAdd() { TestTask taskToAdd = td.alice; commandBox.runCommand(taskToAdd.getAddCommand()); commandBox.runCommand(UndoCommand.COMMAND_WORD); expectedList = TestUtil.addTasksToList(expectedList, taskToAdd); assertRedoSuccess(currentList, expectedList); } }
# ... existing code ... package guitests; import org.junit.Test; import seedu.address.logic.commands.UndoCommand; import seedu.address.testutil.TestTask; import seedu.address.testutil.TestUtil; public class RedoCommandTest extends TaskManagerGuiTest { TestTask[] expectedList = td.getTypicalTasks(); TestTask[] currentList = expectedList; public void redo() { redoAdd(); } /** * Tries to redo an add command */ @Test public void redoAdd() { TestTask taskToAdd = td.alice; commandBox.runCommand(taskToAdd.getAddCommand()); commandBox.runCommand(UndoCommand.COMMAND_WORD); expectedList = TestUtil.addTasksToList(expectedList, taskToAdd); assertRedoSuccess(currentList, expectedList); } } # ... rest of the code ...
b89e210f95b8f41efa8019ee66d6449b7242d56f
tikplay/audio.py
tikplay/audio.py
import json import logging import pysoundcard import pysoundfile from tikplay.database import interface class API(): """ Implements the audio parsing interface for tikplay. Parses song metadata, handles database updating, and pushes the audio to soundcard Also implements basic song metadata fetching from the database """ def __init__(self, di=interface.DatabaseInterface): self.di = di() self.logger = logging.getLogger('AudioAPI') def play(self, song_hash): """ Play a song or add it to queue if a song is already playing Keyword arguments: song_hash: ... Return: true if started playing, false if added to queue """ soundcard = True for dev in list(pysoundcard.devices()): if '(hw:0,0)' in dev['name']: soundcard = dev break stream = pysoundcard.Stream(output_device=soundcard) soundfile = pysoundfile.SoundFile(song_hash) channels = soundfile.channels sample_rate = soundfile.sample_rate stream.output_channels = channels stream.start() stream.write(soundfile[:]) stream.end() def now_playing(self, queue_length=1): """ Shows the now playing or the queue if queue_length is defined Keyword arguments: queue_length (optional): integer stating the length of queue to return. Default: 1. Return: the song that is now playing in the format ("Artist - Title"[, "Artist - Title", ...]) or None if empty """ return None
import json import logging from pyglet import media from tikplay.database import interface class API(): """ Implements the audio parsing interface for tikplay. Parses song metadata, handles database updating, and pushes the audio to soundcard Also implements basic song metadata fetching from the database """ def __init__(self, di=interface.DatabaseInterface): self.player = media.Player() self.di = di() self.logger = logging.getLogger('AudioAPI') def play(self, song_hash): """ Play a song or add it to queue if a song is already playing Keyword arguments: song_hash: ... Return: true if started playing, false if added to queue """ # if cache: load audio metadata from cache # else: check that song_hash is actually a filename for an existing file audio_file = media.load(song_hash) self.player.queue(audio_file) if not self.player.playing: self.player.play() def next(self): self.player.next_source() def pause(self): self.player.pause() def resume(self): self.player.resume() def kill(self): while self.player.playing: self.player.next_source() def now_playing(self, queue_length=1): """ Shows the now playing or the queue if queue_length is defined Keyword arguments: queue_length (optional): integer stating the length of queue to return. Default: 1. Return: the song that is now playing in the format [(Artist, Title), (Artist, Title), ...) or None if empty """ src = self.player.source return [(src.info.author, src.info.title)]
Change pysoundcard and pysoundfile to pyglet
Change pysoundcard and pysoundfile to pyglet
Python
mit
tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay
python
## Code Before: import json import logging import pysoundcard import pysoundfile from tikplay.database import interface class API(): """ Implements the audio parsing interface for tikplay. Parses song metadata, handles database updating, and pushes the audio to soundcard Also implements basic song metadata fetching from the database """ def __init__(self, di=interface.DatabaseInterface): self.di = di() self.logger = logging.getLogger('AudioAPI') def play(self, song_hash): """ Play a song or add it to queue if a song is already playing Keyword arguments: song_hash: ... Return: true if started playing, false if added to queue """ soundcard = True for dev in list(pysoundcard.devices()): if '(hw:0,0)' in dev['name']: soundcard = dev break stream = pysoundcard.Stream(output_device=soundcard) soundfile = pysoundfile.SoundFile(song_hash) channels = soundfile.channels sample_rate = soundfile.sample_rate stream.output_channels = channels stream.start() stream.write(soundfile[:]) stream.end() def now_playing(self, queue_length=1): """ Shows the now playing or the queue if queue_length is defined Keyword arguments: queue_length (optional): integer stating the length of queue to return. Default: 1. Return: the song that is now playing in the format ("Artist - Title"[, "Artist - Title", ...]) or None if empty """ return None ## Instruction: Change pysoundcard and pysoundfile to pyglet ## Code After: import json import logging from pyglet import media from tikplay.database import interface class API(): """ Implements the audio parsing interface for tikplay. Parses song metadata, handles database updating, and pushes the audio to soundcard Also implements basic song metadata fetching from the database """ def __init__(self, di=interface.DatabaseInterface): self.player = media.Player() self.di = di() self.logger = logging.getLogger('AudioAPI') def play(self, song_hash): """ Play a song or add it to queue if a song is already playing Keyword arguments: song_hash: ... 
Return: true if started playing, false if added to queue """ # if cache: load audio metadata from cache # else: check that song_hash is actually a filename for an existing file audio_file = media.load(song_hash) self.player.queue(audio_file) if not self.player.playing: self.player.play() def next(self): self.player.next_source() def pause(self): self.player.pause() def resume(self): self.player.resume() def kill(self): while self.player.playing: self.player.next_source() def now_playing(self, queue_length=1): """ Shows the now playing or the queue if queue_length is defined Keyword arguments: queue_length (optional): integer stating the length of queue to return. Default: 1. Return: the song that is now playing in the format [(Artist, Title), (Artist, Title), ...) or None if empty """ src = self.player.source return [(src.info.author, src.info.title)]
// ... existing code ... import json import logging from pyglet import media from tikplay.database import interface // ... modified code ... Also implements basic song metadata fetching from the database """ def __init__(self, di=interface.DatabaseInterface): self.player = media.Player() self.di = di() self.logger = logging.getLogger('AudioAPI') ... Return: true if started playing, false if added to queue """ # if cache: load audio metadata from cache # else: check that song_hash is actually a filename for an existing file audio_file = media.load(song_hash) self.player.queue(audio_file) if not self.player.playing: self.player.play() def next(self): self.player.next_source() def pause(self): self.player.pause() def resume(self): self.player.resume() def kill(self): while self.player.playing: self.player.next_source() def now_playing(self, queue_length=1): """ Shows the now playing or the queue if queue_length is defined ... queue_length (optional): integer stating the length of queue to return. Default: 1. Return: the song that is now playing in the format [(Artist, Title), (Artist, Title), ...) or None if empty """ src = self.player.source return [(src.info.author, src.info.title)] // ... rest of the code ...
fd1f5bb3bf922fcfd5afdb5d6a0faced6eb995b3
include/Genes/Pawn_Structure_Gene.h
include/Genes/Pawn_Structure_Gene.h
class Board; class Pawn_Structure_Gene : public Clonable_Gene<Pawn_Structure_Gene> { public: Pawn_Structure_Gene() noexcept; std::string name() const noexcept override; double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override; private: double opening_guarded_by_pawn = 1.0; double opening_guarded_by_pawn_in_one_move = 1.0; double opening_guarded_by_piece = 1.0; double endgame_guarded_by_pawn = 1.0; double endgame_guarded_by_pawn_in_one_move = 1.0; double endgame_guarded_by_piece = 1.0; void gene_specific_mutation() noexcept override; void adjust_properties(std::map<std::string, double>& properties) const noexcept override; void load_gene_properties(const std::map<std::string, double>& properties) override; void normalize_guard_scores() noexcept; }; #endif // PAWN_STRUCTURE_GENE_H
class Board; //! \brief A gene to evaluate how well pawns are protected. class Pawn_Structure_Gene : public Clonable_Gene<Pawn_Structure_Gene> { public: Pawn_Structure_Gene() noexcept; std::string name() const noexcept override; private: double opening_guarded_by_pawn = 1.0; double opening_guarded_by_pawn_in_one_move = 1.0; double opening_guarded_by_piece = 1.0; double endgame_guarded_by_pawn = 1.0; double endgame_guarded_by_pawn_in_one_move = 1.0; double endgame_guarded_by_piece = 1.0; double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override; void gene_specific_mutation() noexcept override; void adjust_properties(std::map<std::string, double>& properties) const noexcept override; void load_gene_properties(const std::map<std::string, double>& properties) override; void normalize_guard_scores() noexcept; }; #endif // PAWN_STRUCTURE_GENE_H
Fix public/private members and doxygen comments
Fix public/private members and doxygen comments
C
mit
MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess
c
## Code Before: class Board; class Pawn_Structure_Gene : public Clonable_Gene<Pawn_Structure_Gene> { public: Pawn_Structure_Gene() noexcept; std::string name() const noexcept override; double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override; private: double opening_guarded_by_pawn = 1.0; double opening_guarded_by_pawn_in_one_move = 1.0; double opening_guarded_by_piece = 1.0; double endgame_guarded_by_pawn = 1.0; double endgame_guarded_by_pawn_in_one_move = 1.0; double endgame_guarded_by_piece = 1.0; void gene_specific_mutation() noexcept override; void adjust_properties(std::map<std::string, double>& properties) const noexcept override; void load_gene_properties(const std::map<std::string, double>& properties) override; void normalize_guard_scores() noexcept; }; #endif // PAWN_STRUCTURE_GENE_H ## Instruction: Fix public/private members and doxygen comments ## Code After: class Board; //! \brief A gene to evaluate how well pawns are protected. class Pawn_Structure_Gene : public Clonable_Gene<Pawn_Structure_Gene> { public: Pawn_Structure_Gene() noexcept; std::string name() const noexcept override; private: double opening_guarded_by_pawn = 1.0; double opening_guarded_by_pawn_in_one_move = 1.0; double opening_guarded_by_piece = 1.0; double endgame_guarded_by_pawn = 1.0; double endgame_guarded_by_pawn_in_one_move = 1.0; double endgame_guarded_by_piece = 1.0; double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override; void gene_specific_mutation() noexcept override; void adjust_properties(std::map<std::string, double>& properties) const noexcept override; void load_gene_properties(const std::map<std::string, double>& properties) override; void normalize_guard_scores() noexcept; }; #endif // PAWN_STRUCTURE_GENE_H
... class Board; //! \brief A gene to evaluate how well pawns are protected. class Pawn_Structure_Gene : public Clonable_Gene<Pawn_Structure_Gene> { public: Pawn_Structure_Gene() noexcept; std::string name() const noexcept override; private: double opening_guarded_by_pawn = 1.0; ... double endgame_guarded_by_pawn_in_one_move = 1.0; double endgame_guarded_by_piece = 1.0; double score_board(const Board& board, Piece_Color perspective, size_t depth, double game_progress) const noexcept override; void gene_specific_mutation() noexcept override; void adjust_properties(std::map<std::string, double>& properties) const noexcept override; void load_gene_properties(const std::map<std::string, double>& properties) override; ...
4b03f4e9613e875794e7cc90d5fc6657ded2672d
src/main/java/org/odindelrio/dddaopentitybehaviour/domain/Follower.java
src/main/java/org/odindelrio/dddaopentitybehaviour/domain/Follower.java
package org.odindelrio.dddaopentitybehaviour.domain; import org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException; public class Follower { private final String followerId; public FollowersRepository followersRepository; public Follower(String followerId) { this.followerId = followerId; } public String getFollowerId() { return followerId; } public void follow(Followable followable) { if (followable.getFollowableId().equals(followerId)) { throw new FollowerCantFollowHimSelfException(); } System.out.println("INSIDE Follower.follow() method"); this.followersRepository.persistFollower(this, followable); } }
package org.odindelrio.dddaopentitybehaviour.domain; import org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException; public class Follower { private final String followerId; public transient FollowersRepository followersRepository; public Follower(String followerId) { this.followerId = followerId; } public String getFollowerId() { return followerId; } public void follow(Followable followable) { if (followable.getFollowableId().equals(followerId)) { throw new FollowerCantFollowHimSelfException(); } System.out.println("INSIDE Follower.follow() method"); this.followersRepository.persistFollower(this, followable); } }
Make entity dependency transient, telling serializers that repository shouldn't be serialized.
Make entity dependency transient, telling serializers that repository shouldn't be serialized.
Java
apache-2.0
odin-delrio/aop-for-entity-behaviour,odin-delrio/aop-for-entity-behaviour
java
## Code Before: package org.odindelrio.dddaopentitybehaviour.domain; import org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException; public class Follower { private final String followerId; public FollowersRepository followersRepository; public Follower(String followerId) { this.followerId = followerId; } public String getFollowerId() { return followerId; } public void follow(Followable followable) { if (followable.getFollowableId().equals(followerId)) { throw new FollowerCantFollowHimSelfException(); } System.out.println("INSIDE Follower.follow() method"); this.followersRepository.persistFollower(this, followable); } } ## Instruction: Make entity dependency transient, telling serializers that repository shouldn't be serialized. ## Code After: package org.odindelrio.dddaopentitybehaviour.domain; import org.odindelrio.dddaopentitybehaviour.domain.exception.FollowerCantFollowHimSelfException; public class Follower { private final String followerId; public transient FollowersRepository followersRepository; public Follower(String followerId) { this.followerId = followerId; } public String getFollowerId() { return followerId; } public void follow(Followable followable) { if (followable.getFollowableId().equals(followerId)) { throw new FollowerCantFollowHimSelfException(); } System.out.println("INSIDE Follower.follow() method"); this.followersRepository.persistFollower(this, followable); } }
// ... existing code ... public class Follower { private final String followerId; public transient FollowersRepository followersRepository; public Follower(String followerId) { this.followerId = followerId; // ... rest of the code ...
15c58fb05a9bfb06b87d8d00a1b26d50ee68c1f7
django/publicmapping/redistricting/management/commands/makelanguagefiles.py
django/publicmapping/redistricting/management/commands/makelanguagefiles.py
from django.core.management.base import BaseCommand from redistricting.utils import * class Command(BaseCommand): """ This command prints creates and compiles language message files """ args = None help = 'Create and compile language message files' def handle(self, *args, **options): """ Create and compile language message files """ # Make messages for each language defined in settings for language in settings.LANGUAGES: management.call_command('makemessages', locale=language[0], interactive=False) # Compile all message files management.call_command('compilemessages', interactive=False)
from django.core.management.base import BaseCommand from redistricting.utils import * class Command(BaseCommand): """ This command prints creates and compiles language message files """ args = None help = 'Create and compile language message files' def handle(self, *args, **options): """ Create and compile language message files """ # Make messages for each language defined in settings for language in settings.LANGUAGES: # For django templates management.call_command('makemessages', locale=language[0], interactive=False) # For javascript files management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False) # Compile all message files management.call_command('compilemessages', interactive=False)
Add creation of js message files to management command
Add creation of js message files to management command
Python
apache-2.0
JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder,JimCallahanOrlando/DistrictBuilder
python
## Code Before: from django.core.management.base import BaseCommand from redistricting.utils import * class Command(BaseCommand): """ This command prints creates and compiles language message files """ args = None help = 'Create and compile language message files' def handle(self, *args, **options): """ Create and compile language message files """ # Make messages for each language defined in settings for language in settings.LANGUAGES: management.call_command('makemessages', locale=language[0], interactive=False) # Compile all message files management.call_command('compilemessages', interactive=False) ## Instruction: Add creation of js message files to management command ## Code After: from django.core.management.base import BaseCommand from redistricting.utils import * class Command(BaseCommand): """ This command prints creates and compiles language message files """ args = None help = 'Create and compile language message files' def handle(self, *args, **options): """ Create and compile language message files """ # Make messages for each language defined in settings for language in settings.LANGUAGES: # For django templates management.call_command('makemessages', locale=language[0], interactive=False) # For javascript files management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False) # Compile all message files management.call_command('compilemessages', interactive=False)
# ... existing code ... """ # Make messages for each language defined in settings for language in settings.LANGUAGES: # For django templates management.call_command('makemessages', locale=language[0], interactive=False) # For javascript files management.call_command('makemessages', domain='djangojs', locale=language[0], interactive=False) # Compile all message files management.call_command('compilemessages', interactive=False) # ... rest of the code ...
4f4ba39bf2d270ef1cb34afe1a5ebe7816d448b7
manage.py
manage.py
from werkzeug import script def make_app(): from cadorsfeed.application import CadorsFeed return CadorsFeed() def make_shell(): from cadorsfeed import utils application = make_app() return locals() action_runserver = script.make_runserver(make_app, use_reloader=True) action_shell = script.make_shell(make_shell) script.run()
from werkzeug import script def make_app(): from cadorsfeed.application import CadorsFeed return CadorsFeed() def make_shell(): from cadorsfeed import utils application = make_app() return locals() action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='') action_shell = script.make_shell(make_shell) script.run()
Set hostname to '' so the server binds to all interfaces.
Set hostname to '' so the server binds to all interfaces.
Python
mit
kurtraschke/cadors-parse,kurtraschke/cadors-parse
python
## Code Before: from werkzeug import script def make_app(): from cadorsfeed.application import CadorsFeed return CadorsFeed() def make_shell(): from cadorsfeed import utils application = make_app() return locals() action_runserver = script.make_runserver(make_app, use_reloader=True) action_shell = script.make_shell(make_shell) script.run() ## Instruction: Set hostname to '' so the server binds to all interfaces. ## Code After: from werkzeug import script def make_app(): from cadorsfeed.application import CadorsFeed return CadorsFeed() def make_shell(): from cadorsfeed import utils application = make_app() return locals() action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='') action_shell = script.make_shell(make_shell) script.run()
... application = make_app() return locals() action_runserver = script.make_runserver(make_app, use_reloader=True, hostname='') action_shell = script.make_shell(make_shell) script.run() ...
7f51f153f0fd1fd1dde06808879911897686f819
cities/Sample_City.py
cities/Sample_City.py
from bs4 import BeautifulSoup import json import datetime import pytz # The URL for the page where the parking lots are listed data_url = "http://example.com" # Name of the city, just in case it contains umlauts which this filename shouldn't city_name = "Sample City" # Name of this file (without '.py'), sorry for needing this, but it makes things easier file_name = "Sample_City" def parse_html(html): soup = BeautifulSoup(html) # Do everything necessary to scrape the contents of the html # into a dictionary of the format specified by the schema. def get_geodata_for_lot(lot_name): geofile = open("./cities/" + file_name + ".geojson") geodata = geofile.read() geofile.close() geodata = json.loads(geodata) for feature in geodata["features"]: if feature["properties"]["name"] == lot_name: return { "lon": feature["geometry"]["coordinates"][0], "lat": feature["geometry"]["coordinates"][1] } return [] if __name__ == "__main__": file = open("../tests/sample_city.html") html_data = file.read() file.close() parse_html(html_data)
from bs4 import BeautifulSoup import datetime import pytz from geodata import GeoData # The URL for the page where the parking lots are listed data_url = "http://example.com" # Name of the city, just in case it contains umlauts which this filename shouldn't city_name = "Sample City" # Name of this file (without '.py'), sorry for needing this, but it makes things easier file_name = "Sample_City" # Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory # geodata = GeoData(city_name) def parse_html(html): soup = BeautifulSoup(html) # Do everything necessary to scrape the contents of the html # into a dictionary of the format specified by the schema. data = { "last_updated": "", "lots": [] } print(data) return data # the following is for testing this out, just delete it all when done if __name__ == "__main__": with open("../tests/sample_city.html") as f: parse_html(f.read())
Clean up sample city file
Clean up sample city file
Python
mit
offenesdresden/ParkAPI,Mic92/ParkAPI,offenesdresden/ParkAPI,Mic92/ParkAPI
python
## Code Before: from bs4 import BeautifulSoup import json import datetime import pytz # The URL for the page where the parking lots are listed data_url = "http://example.com" # Name of the city, just in case it contains umlauts which this filename shouldn't city_name = "Sample City" # Name of this file (without '.py'), sorry for needing this, but it makes things easier file_name = "Sample_City" def parse_html(html): soup = BeautifulSoup(html) # Do everything necessary to scrape the contents of the html # into a dictionary of the format specified by the schema. def get_geodata_for_lot(lot_name): geofile = open("./cities/" + file_name + ".geojson") geodata = geofile.read() geofile.close() geodata = json.loads(geodata) for feature in geodata["features"]: if feature["properties"]["name"] == lot_name: return { "lon": feature["geometry"]["coordinates"][0], "lat": feature["geometry"]["coordinates"][1] } return [] if __name__ == "__main__": file = open("../tests/sample_city.html") html_data = file.read() file.close() parse_html(html_data) ## Instruction: Clean up sample city file ## Code After: from bs4 import BeautifulSoup import datetime import pytz from geodata import GeoData # The URL for the page where the parking lots are listed data_url = "http://example.com" # Name of the city, just in case it contains umlauts which this filename shouldn't city_name = "Sample City" # Name of this file (without '.py'), sorry for needing this, but it makes things easier file_name = "Sample_City" # Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory # geodata = GeoData(city_name) def parse_html(html): soup = BeautifulSoup(html) # Do everything necessary to scrape the contents of the html # into a dictionary of the format specified by the schema. 
data = { "last_updated": "", "lots": [] } print(data) return data # the following is for testing this out, just delete it all when done if __name__ == "__main__": with open("../tests/sample_city.html") as f: parse_html(f.read())
... from bs4 import BeautifulSoup import datetime import pytz from geodata import GeoData # The URL for the page where the parking lots are listed data_url = "http://example.com" ... # Name of this file (without '.py'), sorry for needing this, but it makes things easier file_name = "Sample_City" # Uncomment the following line if there's geodata in the format of Sample_City.geodata in this directory # geodata = GeoData(city_name) def parse_html(html): soup = BeautifulSoup(html) ... # Do everything necessary to scrape the contents of the html # into a dictionary of the format specified by the schema. data = { "last_updated": "", "lots": [] } print(data) return data # the following is for testing this out, just delete it all when done if __name__ == "__main__": with open("../tests/sample_city.html") as f: parse_html(f.read()) ...
791d021497e1e4169383b28e6e1c8ab806645616
plugins/stats-collector/src/com/intellij/stats/experiment/EmulatedExperiment.kt
plugins/stats-collector/src/com/intellij/stats/experiment/EmulatedExperiment.kt
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.stats.experiment import com.intellij.internal.statistic.DeviceIdManager import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.util.registry.Registry /* * For now, we decide about AB experiment inside IDE using user id and salt */ class EmulatedExperiment { companion object { const val GROUP_A_EXPERIMENT_VERSION: Int = 7 const val GROUP_B_EXPERIMENT_VERSION: Int = 8 const val IS_ENABLED = true fun shouldRank(experimentVersion: Int): Boolean { return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`("completion.stats.exit.experiment") } } fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? { val application = ApplicationManager.getApplication() if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) { return null } val userId = DeviceIdManager.getOrGenerateId() val hash = (userId + salt).hashCode() % 16 return when (hash) { 3 -> GROUP_A_EXPERIMENT_VERSION 4 -> GROUP_B_EXPERIMENT_VERSION else -> null } } }
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.stats.experiment import com.intellij.internal.statistic.DeviceIdManager import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.util.registry.Registry import kotlin.math.abs /* * For now, we decide about AB experiment inside IDE using user id and salt */ class EmulatedExperiment { companion object { const val GROUP_A_EXPERIMENT_VERSION: Int = 7 const val GROUP_B_EXPERIMENT_VERSION: Int = 8 const val IS_ENABLED = true fun shouldRank(experimentVersion: Int): Boolean { return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`("completion.stats.exit.experiment") } } fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? { val application = ApplicationManager.getApplication() if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) { return null } val userId = DeviceIdManager.getOrGenerateId() val hash = abs((userId + salt).hashCode()) % 8 return when (hash) { 3 -> GROUP_A_EXPERIMENT_VERSION 4 -> GROUP_B_EXPERIMENT_VERSION else -> null } } }
Increase the number of AB experiment participants (x4)
[stats-collector] Increase the number of AB experiment participants (x4) GitOrigin-RevId: e56710dffe69f4bafa7be3805c3f0d60e90c0529
Kotlin
apache-2.0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
kotlin
## Code Before: // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.stats.experiment import com.intellij.internal.statistic.DeviceIdManager import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.util.registry.Registry /* * For now, we decide about AB experiment inside IDE using user id and salt */ class EmulatedExperiment { companion object { const val GROUP_A_EXPERIMENT_VERSION: Int = 7 const val GROUP_B_EXPERIMENT_VERSION: Int = 8 const val IS_ENABLED = true fun shouldRank(experimentVersion: Int): Boolean { return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`("completion.stats.exit.experiment") } } fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? { val application = ApplicationManager.getApplication() if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) { return null } val userId = DeviceIdManager.getOrGenerateId() val hash = (userId + salt).hashCode() % 16 return when (hash) { 3 -> GROUP_A_EXPERIMENT_VERSION 4 -> GROUP_B_EXPERIMENT_VERSION else -> null } } } ## Instruction: [stats-collector] Increase the number of AB experiment participants (x4) GitOrigin-RevId: e56710dffe69f4bafa7be3805c3f0d60e90c0529 ## Code After: // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. 
package com.intellij.stats.experiment import com.intellij.internal.statistic.DeviceIdManager import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.util.registry.Registry import kotlin.math.abs /* * For now, we decide about AB experiment inside IDE using user id and salt */ class EmulatedExperiment { companion object { const val GROUP_A_EXPERIMENT_VERSION: Int = 7 const val GROUP_B_EXPERIMENT_VERSION: Int = 8 const val IS_ENABLED = true fun shouldRank(experimentVersion: Int): Boolean { return experimentVersion == GROUP_B_EXPERIMENT_VERSION && !Registry.`is`("completion.stats.exit.experiment") } } fun emulate(experimentVersion: Int, performExperiment: Boolean, salt: String): Int? { val application = ApplicationManager.getApplication() if (!application.isEAP || application.isUnitTestMode || experimentVersion != 2 || performExperiment || !IS_ENABLED) { return null } val userId = DeviceIdManager.getOrGenerateId() val hash = abs((userId + salt).hashCode()) % 8 return when (hash) { 3 -> GROUP_A_EXPERIMENT_VERSION 4 -> GROUP_B_EXPERIMENT_VERSION else -> null } } }
... import com.intellij.internal.statistic.DeviceIdManager import com.intellij.openapi.application.ApplicationManager import com.intellij.openapi.util.registry.Registry import kotlin.math.abs /* * For now, we decide about AB experiment inside IDE using user id and salt ... } val userId = DeviceIdManager.getOrGenerateId() val hash = abs((userId + salt).hashCode()) % 8 return when (hash) { 3 -> GROUP_A_EXPERIMENT_VERSION 4 -> GROUP_B_EXPERIMENT_VERSION ...
e2be9eb27d6fc7cfa424cbf908347796ab595526
groundstation/broadcast_announcer.py
groundstation/broadcast_announcer.py
import socket import logger from groundstation.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) class BroadcastAnnouncer(BroadcastSocket): def __init__(self, port): super(BroadcastAnnouncer, self).__init__() self._addr = '255.255.255.255', port self._name = None self.broadcast_payload = "PING None" @property def name(self): return self._name @name.setter def name(self, value): self._name = value self.broadcast_payload = "PING %s" % (self._name) def ping(self): log.info("ping payload: %s" % (self.broadcast_payload)) transmitted = self.socket.sendto(self.broadcast_payload, self._addr) if transmitted != len(self.broadcast_payload): log.warning("ping wasn't successfully broadcast")
import socket import logger from sockets.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) class BroadcastAnnouncer(BroadcastSocket): def __init__(self, port): super(BroadcastAnnouncer, self).__init__() self._addr = '255.255.255.255', port self._name = None self.broadcast_payload = "PING None" @property def name(self): return self._name @name.setter def name(self, value): self._name = value self.broadcast_payload = "PING %s" % (self._name) def ping(self): log.info("ping payload: %s" % (self.broadcast_payload)) transmitted = self.socket.sendto(self.broadcast_payload, self._addr) if transmitted != len(self.broadcast_payload): log.warning("ping wasn't successfully broadcast")
Fix an import path bug masked by remaining .pyc files
Fix an import path bug masked by remaining .pyc files
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
python
## Code Before: import socket import logger from groundstation.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) class BroadcastAnnouncer(BroadcastSocket): def __init__(self, port): super(BroadcastAnnouncer, self).__init__() self._addr = '255.255.255.255', port self._name = None self.broadcast_payload = "PING None" @property def name(self): return self._name @name.setter def name(self, value): self._name = value self.broadcast_payload = "PING %s" % (self._name) def ping(self): log.info("ping payload: %s" % (self.broadcast_payload)) transmitted = self.socket.sendto(self.broadcast_payload, self._addr) if transmitted != len(self.broadcast_payload): log.warning("ping wasn't successfully broadcast") ## Instruction: Fix an import path bug masked by remaining .pyc files ## Code After: import socket import logger from sockets.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) class BroadcastAnnouncer(BroadcastSocket): def __init__(self, port): super(BroadcastAnnouncer, self).__init__() self._addr = '255.255.255.255', port self._name = None self.broadcast_payload = "PING None" @property def name(self): return self._name @name.setter def name(self, value): self._name = value self.broadcast_payload = "PING %s" % (self._name) def ping(self): log.info("ping payload: %s" % (self.broadcast_payload)) transmitted = self.socket.sendto(self.broadcast_payload, self._addr) if transmitted != len(self.broadcast_payload): log.warning("ping wasn't successfully broadcast")
// ... existing code ... import socket import logger from sockets.broadcast_socket import BroadcastSocket import logger log = logger.getLogger(__name__) // ... rest of the code ...
67c1855f75a3c29bc650c193235576f6b591c805
payment_redsys/__manifest__.py
payment_redsys/__manifest__.py
{ "name": "Pasarela de pago Redsys", "category": "Payment Acquirer", "summary": "Payment Acquirer: Redsys Implementation", "version": "14.0.2.0.0", "author": "Tecnativa," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-spain", "depends": ["payment", "website_sale"], "external_dependencies": {"python": ["Crypto.Cipher.DES3"]}, "data": [ "views/redsys.xml", "views/payment_acquirer.xml", "views/payment_redsys_templates.xml", "data/payment_redsys.xml", ], "license": "AGPL-3", "installable": True, }
{ "name": "Pasarela de pago Redsys", "category": "Payment Acquirer", "summary": "Payment Acquirer: Redsys Implementation", "version": "14.0.2.0.0", "author": "Tecnativa," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-spain", "depends": ["payment", "website_sale"], "external_dependencies": {"python": ["pycrypto"]}, "data": [ "views/redsys.xml", "views/payment_acquirer.xml", "views/payment_redsys_templates.xml", "data/payment_redsys.xml", ], "license": "AGPL-3", "installable": True, }
Put real package on pypi
[IMP] payment_redsys: Put real package on pypi
Python
agpl-3.0
cubells/l10n-spain,cubells/l10n-spain,cubells/l10n-spain
python
## Code Before: { "name": "Pasarela de pago Redsys", "category": "Payment Acquirer", "summary": "Payment Acquirer: Redsys Implementation", "version": "14.0.2.0.0", "author": "Tecnativa," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-spain", "depends": ["payment", "website_sale"], "external_dependencies": {"python": ["Crypto.Cipher.DES3"]}, "data": [ "views/redsys.xml", "views/payment_acquirer.xml", "views/payment_redsys_templates.xml", "data/payment_redsys.xml", ], "license": "AGPL-3", "installable": True, } ## Instruction: [IMP] payment_redsys: Put real package on pypi ## Code After: { "name": "Pasarela de pago Redsys", "category": "Payment Acquirer", "summary": "Payment Acquirer: Redsys Implementation", "version": "14.0.2.0.0", "author": "Tecnativa," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-spain", "depends": ["payment", "website_sale"], "external_dependencies": {"python": ["pycrypto"]}, "data": [ "views/redsys.xml", "views/payment_acquirer.xml", "views/payment_redsys_templates.xml", "data/payment_redsys.xml", ], "license": "AGPL-3", "installable": True, }
... "author": "Tecnativa," "Odoo Community Association (OCA)", "website": "https://github.com/OCA/l10n-spain", "depends": ["payment", "website_sale"], "external_dependencies": {"python": ["pycrypto"]}, "data": [ "views/redsys.xml", "views/payment_acquirer.xml", ...
0d1f0fa79d2cf381ed15b98ada4f9bec28f6c749
providence-tools-common/src/main/java/net/morimekta/providence/tools/common/Utils.java
providence-tools-common/src/main/java/net/morimekta/providence/tools/common/Utils.java
package net.morimekta.providence.tools.common; import java.io.IOException; import java.io.InputStream; import java.util.Properties; /** * General utility methods. */ public class Utils { public static String getVersionString() throws IOException { Properties properties = new Properties(); try (InputStream in = Utils.class.getResourceAsStream("/version.properties")) { properties.load(in); } return "v" + properties.getProperty("build.version"); } }
package net.morimekta.providence.tools.common; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.util.Properties; /** * General utility methods. */ public class Utils { private transient volatile static String versionString = null; public static String getVersionString() { if (versionString == null) { try { Properties properties = new Properties(); try (InputStream in = Utils.class.getResourceAsStream("/version.properties")) { properties.load(in); } versionString = "v" + properties.getProperty("build.version"); } catch (IOException e) { throw new UncheckedIOException(e.getMessage(), e); } } return versionString; } }
Read version string only once.
Read version string only once.
Java
apache-2.0
morimekta/thrift-j2,morimekta/providence,morimekta/providence
java
## Code Before: package net.morimekta.providence.tools.common; import java.io.IOException; import java.io.InputStream; import java.util.Properties; /** * General utility methods. */ public class Utils { public static String getVersionString() throws IOException { Properties properties = new Properties(); try (InputStream in = Utils.class.getResourceAsStream("/version.properties")) { properties.load(in); } return "v" + properties.getProperty("build.version"); } } ## Instruction: Read version string only once. ## Code After: package net.morimekta.providence.tools.common; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.util.Properties; /** * General utility methods. */ public class Utils { private transient volatile static String versionString = null; public static String getVersionString() { if (versionString == null) { try { Properties properties = new Properties(); try (InputStream in = Utils.class.getResourceAsStream("/version.properties")) { properties.load(in); } versionString = "v" + properties.getProperty("build.version"); } catch (IOException e) { throw new UncheckedIOException(e.getMessage(), e); } } return versionString; } }
... import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.util.Properties; /** ... * General utility methods. */ public class Utils { private transient volatile static String versionString = null; public static String getVersionString() { if (versionString == null) { try { Properties properties = new Properties(); try (InputStream in = Utils.class.getResourceAsStream("/version.properties")) { properties.load(in); } versionString = "v" + properties.getProperty("build.version"); } catch (IOException e) { throw new UncheckedIOException(e.getMessage(), e); } } return versionString; } } ...
6da466984143d2a9176870583ca5dba8d1b9764c
test/integration/test_graylogapi.py
test/integration/test_graylogapi.py
import pytest from pygraylog.pygraylog import graylogapi def test_get(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') res = api._get() expected = { 'one': 'two' } assert res == expected def test_post(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._post() def test_put(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._put() def test_delete(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._delete()
import pytest from pygraylog.pygraylog import graylogapi def test_get(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') res = api._get() expected = "{\"one\": \"two\"}\n" assert res == expected def test_post(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._post() def test_put(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._put() def test_delete(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._delete()
Modify test to reflect that api returns string response.
Modify test to reflect that api returns string response.
Python
apache-2.0
zmallen/pygraylog
python
## Code Before: import pytest from pygraylog.pygraylog import graylogapi def test_get(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') res = api._get() expected = { 'one': 'two' } assert res == expected def test_post(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._post() def test_put(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._put() def test_delete(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._delete() ## Instruction: Modify test to reflect that api returns string response. ## Code After: import pytest from pygraylog.pygraylog import graylogapi def test_get(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') res = api._get() expected = "{\"one\": \"two\"}\n" assert res == expected def test_post(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._post() def test_put(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._put() def test_delete(): api = graylogapi.GraylogAPI('http://echo.jsontest.com/one/two', username = 'Zack', password = 'Zack') with pytest.raises(NotImplementedError): api._delete()
// ... existing code ... username = 'Zack', password = 'Zack') res = api._get() expected = "{\"one\": \"two\"}\n" assert res == expected def test_post(): // ... rest of the code ...
19dd810c5acb35ce5d7565ee57a55ae725194bd1
mvp/integration.py
mvp/integration.py
class Integration(object): name = None description = None icon = None banner = None requires_confirmation = False enabled_by_default = False columns = 1 def __init__(self): self.set_enabled(self.enabled_by_default) def fields(self): '''Return a list of fields. Example: return [ { 'name': 'StringField', 'type': 'str', 'default': None, 'options': [...], 'required': False, }, ... ] ''' return NotImplemented def on_filename_changed(self, form, value): return NotImplemented def set_enabled(self, value): '''Returns True if the integration was successfully enabled''' if value: return self._on_enable() else: return self._on_disable() def _on_enable(self): self.enabled = self.on_enable() return self.enabled def on_enable(self): '''Return True to enable integration and False to disable''' return True def _on_disable(self): self.enabled = not self.on_disable() return self.enabled def on_disable(self): '''Return True to disable integration and False to enable''' return True def before_playblast(self, data): return NotImplemented def after_playblast(self, data): return NotImplemented
class Integration(object): name = None description = None icon = None banner = None requires_confirmation = False enabled_by_default = False columns = 1 def __init__(self): self.set_enabled(self.enabled_by_default) def fields(self): '''Return a list of fields. Example: return [ { 'name': 'StringField', 'type': 'str', 'default': None, 'options': [...], 'required': False, }, ... ] ''' return NotImplemented def on_filename_changed(self, form, value): return NotImplemented def set_enabled(self, value): '''Returns True if the integration was successfully enabled''' if value: return self._on_enable() else: return self._on_disable() def _on_enable(self): self.enabled = self.on_enable() return self.enabled def on_enable(self): '''Return True to enable integration and False to disable''' return True def _on_disable(self): self.enabled = not self.on_disable() return self.enabled def on_disable(self): '''Return True to disable integration and False to enable''' return True def before_playblast(self, form, data): '''Runs before playblasting.''' return NotImplemented def after_playblast(self, form, data): '''Runs after playblasting.''' return NotImplemented def finalize(self, form, data): '''Runs after entire playblast process is finished. Unlike after_playblast, this method will only run ONCE after all playblasting is finished. So, when playblasting multiple render layers you can use this to execute after all of those render layers have completed rendering. Arguments: form: The Form object including render options data: List of renders that were output ''' return NotImplemented
Add finalize method to Integration.
Add finalize method to Integration.
Python
mit
danbradham/mvp
python
## Code Before: class Integration(object): name = None description = None icon = None banner = None requires_confirmation = False enabled_by_default = False columns = 1 def __init__(self): self.set_enabled(self.enabled_by_default) def fields(self): '''Return a list of fields. Example: return [ { 'name': 'StringField', 'type': 'str', 'default': None, 'options': [...], 'required': False, }, ... ] ''' return NotImplemented def on_filename_changed(self, form, value): return NotImplemented def set_enabled(self, value): '''Returns True if the integration was successfully enabled''' if value: return self._on_enable() else: return self._on_disable() def _on_enable(self): self.enabled = self.on_enable() return self.enabled def on_enable(self): '''Return True to enable integration and False to disable''' return True def _on_disable(self): self.enabled = not self.on_disable() return self.enabled def on_disable(self): '''Return True to disable integration and False to enable''' return True def before_playblast(self, data): return NotImplemented def after_playblast(self, data): return NotImplemented ## Instruction: Add finalize method to Integration. ## Code After: class Integration(object): name = None description = None icon = None banner = None requires_confirmation = False enabled_by_default = False columns = 1 def __init__(self): self.set_enabled(self.enabled_by_default) def fields(self): '''Return a list of fields. Example: return [ { 'name': 'StringField', 'type': 'str', 'default': None, 'options': [...], 'required': False, }, ... 
] ''' return NotImplemented def on_filename_changed(self, form, value): return NotImplemented def set_enabled(self, value): '''Returns True if the integration was successfully enabled''' if value: return self._on_enable() else: return self._on_disable() def _on_enable(self): self.enabled = self.on_enable() return self.enabled def on_enable(self): '''Return True to enable integration and False to disable''' return True def _on_disable(self): self.enabled = not self.on_disable() return self.enabled def on_disable(self): '''Return True to disable integration and False to enable''' return True def before_playblast(self, form, data): '''Runs before playblasting.''' return NotImplemented def after_playblast(self, form, data): '''Runs after playblasting.''' return NotImplemented def finalize(self, form, data): '''Runs after entire playblast process is finished. Unlike after_playblast, this method will only run ONCE after all playblasting is finished. So, when playblasting multiple render layers you can use this to execute after all of those render layers have completed rendering. Arguments: form: The Form object including render options data: List of renders that were output ''' return NotImplemented
// ... existing code ... return True def before_playblast(self, form, data): '''Runs before playblasting.''' return NotImplemented def after_playblast(self, form, data): '''Runs after playblasting.''' return NotImplemented def finalize(self, form, data): '''Runs after entire playblast process is finished. Unlike after_playblast, this method will only run ONCE after all playblasting is finished. So, when playblasting multiple render layers you can use this to execute after all of those render layers have completed rendering. Arguments: form: The Form object including render options data: List of renders that were output ''' return NotImplemented // ... rest of the code ...
aa55b1577582b61c011a2af2e4e5f3bf78421e38
src/main/java/com/techcavern/wavetact/Main.java
src/main/java/com/techcavern/wavetact/Main.java
package com.techcavern.wavetact; import com.techcavern.wavetact.utils.CommandLineUtils; import com.techcavern.wavetact.utils.GeneralRegistry; import com.techcavern.wavetact.utils.IRCUtils; import org.slf4j.impl.SimpleLogger; @SuppressWarnings("ConstantConditions") public class Main { public static void main(String[] args) throws Exception { CommandLineUtils.initializeCommandlines(); // CommandLineUtils.parseCommandLineArguments(args); System.out.println("Starting..."); System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, "true"); System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, "[yyyy/MM/dd HH:mm:ss]"); System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, "true"); IRCUtils.registerCommands(); // IRCUtils.registerNetworks(); IRCUtils.registerDevServer(); IRCUtils.loadSimpleActions(); IRCUtils.loadSimpleMessages(); IRCUtils.startThreads(); GeneralRegistry.WaveTact.start(); } }
package com.techcavern.wavetact; import com.techcavern.wavetact.utils.CommandLineUtils; import com.techcavern.wavetact.utils.GeneralRegistry; import com.techcavern.wavetact.utils.IRCUtils; import org.slf4j.impl.SimpleLogger; @SuppressWarnings("ConstantConditions") public class Main { public static void main(String[] args) throws Exception { if (!Boolean.parseBoolean(System.getProperty("dev"))) { System.out.println("Running in production mode"); CommandLineUtils.initializeCommandlines(); CommandLineUtils.parseCommandLineArguments(args); } else { System.out.println("Running in developer mode"); IRCUtils.registerDevServer(); } System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, "true"); System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, "[yyyy/MM/dd HH:mm:ss]"); System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, "true"); IRCUtils.registerCommands(); IRCUtils.registerDevServer(); IRCUtils.loadSimpleActions(); IRCUtils.loadSimpleMessages(); IRCUtils.startThreads(); GeneralRegistry.WaveTact.start(); } }
Add system parameter for dev mode
Add system parameter for dev mode
Java
mit
TechCavern/WaveTact
java
## Code Before: package com.techcavern.wavetact; import com.techcavern.wavetact.utils.CommandLineUtils; import com.techcavern.wavetact.utils.GeneralRegistry; import com.techcavern.wavetact.utils.IRCUtils; import org.slf4j.impl.SimpleLogger; @SuppressWarnings("ConstantConditions") public class Main { public static void main(String[] args) throws Exception { CommandLineUtils.initializeCommandlines(); // CommandLineUtils.parseCommandLineArguments(args); System.out.println("Starting..."); System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, "true"); System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, "[yyyy/MM/dd HH:mm:ss]"); System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, "true"); IRCUtils.registerCommands(); // IRCUtils.registerNetworks(); IRCUtils.registerDevServer(); IRCUtils.loadSimpleActions(); IRCUtils.loadSimpleMessages(); IRCUtils.startThreads(); GeneralRegistry.WaveTact.start(); } } ## Instruction: Add system parameter for dev mode ## Code After: package com.techcavern.wavetact; import com.techcavern.wavetact.utils.CommandLineUtils; import com.techcavern.wavetact.utils.GeneralRegistry; import com.techcavern.wavetact.utils.IRCUtils; import org.slf4j.impl.SimpleLogger; @SuppressWarnings("ConstantConditions") public class Main { public static void main(String[] args) throws Exception { if (!Boolean.parseBoolean(System.getProperty("dev"))) { System.out.println("Running in production mode"); CommandLineUtils.initializeCommandlines(); CommandLineUtils.parseCommandLineArguments(args); } else { System.out.println("Running in developer mode"); IRCUtils.registerDevServer(); } System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, "true"); System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, "[yyyy/MM/dd HH:mm:ss]"); System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, "true"); IRCUtils.registerCommands(); IRCUtils.registerDevServer(); IRCUtils.loadSimpleActions(); IRCUtils.loadSimpleMessages(); IRCUtils.startThreads(); GeneralRegistry.WaveTact.start(); } }
... public static void main(String[] args) throws Exception { if (!Boolean.parseBoolean(System.getProperty("dev"))) { System.out.println("Running in production mode"); CommandLineUtils.initializeCommandlines(); CommandLineUtils.parseCommandLineArguments(args); } else { System.out.println("Running in developer mode"); IRCUtils.registerDevServer(); } System.setProperty(SimpleLogger.SHOW_DATE_TIME_KEY, "true"); System.setProperty(SimpleLogger.DATE_TIME_FORMAT_KEY, "[yyyy/MM/dd HH:mm:ss]"); System.setProperty(SimpleLogger.LEVEL_IN_BRACKETS_KEY, "true"); IRCUtils.registerCommands(); IRCUtils.registerDevServer(); IRCUtils.loadSimpleActions(); IRCUtils.loadSimpleMessages(); ...