\\n\" +\n \"\";\n String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString);\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n"},"new_contents":{"kind":"string","value":"package converter;\n\nimport processors.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"<note>\\n\" +\n \"<to>Tove</to>\\n\" +\n \"<from>Jani</from>\\n\" +\n \"<heading>Reminder</heading>\\n\" +\n \"<body>Don't forget me this weekend!</body>\\n\" +\n \"</note>\";\n String jsonString = null;\n try {\n jsonString = new XmlToJsonConverter().process(xmlString, null);\n } catch (Throwable throwable) {\n throwable.printStackTrace();\n }\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n"},"subject":{"kind":"string","value":"Update test for new interface."},"message":{"kind":"string","value":"Update test for new interface.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage converter;\n\nimport converters.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"<note>\\n\" +\n \"<to>Tove</to>\\n\" +\n \"<from>Jani</from>\\n\" +\n \"<heading>Reminder</heading>\\n\" +\n \"<body>Don't forget me this weekend!</body>\\n\" +\n \"</note>\";\n String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString);\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n}\n\n## Instruction:\nUpdate test for new interface.\n\n## Code After:\npackage converter;\n\nimport processors.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\npublic class XmlToJsonConverterTest {\n\n /**\n * Test conversion from xml to json.\n */\n @Test\n public void convertXmlToJsonTest() {\n String xmlString =\n \"<note>\\n\" +\n \"<to>Tove</to>\\n\" +\n \"<from>Jani</from>\\n\" +\n \"<heading>Reminder</heading>\\n\" +\n \"<body>Don't forget me this weekend!</body>\\n\" +\n \"</note>\";\n String jsonString = null;\n try {\n jsonString = new XmlToJsonConverter().process(xmlString, null);\n } catch (Throwable throwable) {\n throwable.printStackTrace();\n }\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n 
}\n}\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\npackage converter;\n\nimport processors.XmlToJsonConverter;\nimport org.junit.Test;\n\nimport static org.junit.Assert.assertTrue;\n\n\n// ... modified code ... \n\n\n \"Reminder\\n\" +\n \"Don't forget me this weekend!\\n\" +\n \"\";\n String jsonString = null;\n try {\n jsonString = new XmlToJsonConverter().process(xmlString, null);\n } catch (Throwable throwable) {\n throwable.printStackTrace();\n }\n String expectedJsonResult = \"{\\\"note\\\":{\\\"heading\\\":\\\"Reminder\\\",\\\"from\\\":\\\"Jani\\\",\\\"to\\\":\\\"Tove\\\",\\\"body\\\":\\\"Don't forget me this weekend!\\\"}}\";\n assertTrue(expectedJsonResult.equals(expectedJsonResult));\n }\n\n\n// ... rest of the code ..."}}},{"rowIdx":3725,"cells":{"commit":{"kind":"string","value":"19faea809ec3ea8a9722b0e87bb028fd23c721a1"},"old_file":{"kind":"string","value":"modlib.c"},"new_file":{"kind":"string","value":"modlib.c"},"old_contents":{"kind":"string","value":"\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n unsigned char Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n unsigned char Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tunsigned char j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"new_contents":{"kind":"string","value":"\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"subject":{"kind":"string","value":"Change 'unsigned character' type variables to 'uint8_t'"},"message":{"kind":"string","value":"Change 'unsigned character' type variables to 'uint8_t'\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Jacajack/modlib"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to 
little-endian and vice versa\n\n unsigned char Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n unsigned char Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tunsigned char j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n\n## Instruction:\nChange 'unsigned character' type variables to 'uint8_t'\n\n## Code After:\n\nuint16_t MODBUSSwapEndian( uint16_t Data )\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n Conversion.Data = Data;\n Swap = Conversion.Bytes[0];\n Conversion.Bytes[0] = Conversion.Bytes[1];\n Conversion.Bytes[1] = Swap;\n\n return Conversion.Data;\n}\n\nuint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length )\n{\n\t//Calculate CRC16 checksum using given data and length\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\t\tCRC ^= Data[i]; //XOR current data byte with CRC value\n\n\t\tfor ( j = 8; j != 0; j-- )\n\t\t{\n\t\t\t//For each bit\n\t\t\t//Is least-significant-bit is set?\n \t\tif ( ( CRC & 0x0001 ) != 0 )\n\t\t\t{\n \t\t\tCRC >>= 1; //Shift to right and xor\n \t\t\tCRC ^= 0xA001;\n \t\t}\n \t\telse // Else LSB is not set\n \t\t\tCRC >>= 1;\n\t\t}\n\t}\n\treturn CRC;\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n{\n //Change big-endian to little-endian and vice versa\n\n uint8_t Swap;\n\n\t//Create 2 bytes long union\n union Conversion\n {\n uint16_t Data;\n uint8_t Bytes[2];\n } Conversion;\n\n\t//Swap bytes\n\n\n ... \n\n\n\n\tuint16_t CRC = 0xFFFF;\n\tuint16_t i;\n\tuint8_t j;\n\n\tfor ( i = 0; i < Length; i++ )\n\t{\n\n\n ... 
"}}},{"rowIdx":3726,"cells":{"commit":{"kind":"string","value":"09f649ac0b14269067c43df9f879d963ab99cdac"},"old_file":{"kind":"string","value":"backend/breach/views.py"},"new_file":{"kind":"string","value":"backend/breach/views.py"},"old_contents":{"kind":"string","value":"import json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return HttpResponse(json.dumps(new_work), content_type='application/json')\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"new_contents":{"kind":"string","value":"import json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"subject":{"kind":"string","value":"Fix response with json for get_work"},"message":{"kind":"string","value":"Fix response with json for get_work\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return HttpResponse(json.dumps(new_work), content_type='application/json')\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n 
try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n\n## Instruction:\nFix response with json for get_work\n\n## Code After:\nimport json\nfrom django.http import Http404, JsonResponse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom breach.strategy import Strategy\nfrom breach.models import Victim\n\n\ndef get_work(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n\n # Example work structure:\n # return {'url': 'https://www.dimkarakostas.com/?breach-test',\n # 'amount': 10,\n # 'timeout': 0}\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n assert(victim_id)\n\n try:\n victim = Victim.objects.get(pk=victim_id)\n except:\n raise Http404('Victim not found')\n\n strategy = Strategy(victim)\n victory = strategy.work_completed()\n\n return JsonResponse({\n 'victory': victory\n })\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n new_work = strategy.get_work()\n\n return JsonResponse(new_work)\n\n\n@csrf_exempt\ndef work_completed(request, victim_id=0):\n\n\n ... "}}},{"rowIdx":3727,"cells":{"commit":{"kind":"string","value":"77f0c9dcf7fdc4ebe904c32b64fbcb5c9c1f4d6b"},"old_file":{"kind":"string","value":"src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java"},"new_file":{"kind":"string","value":"src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java"},"old_contents":{"kind":"string","value":"package com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"new_contents":{"kind":"string","value":"package com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the 
header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"subject":{"kind":"string","value":"Add comment describing dpi conversion"},"message":{"kind":"string","value":"Add comment describing dpi conversion\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"depsypher/pngtastic"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n\n## Instruction:\nAdd comment describing dpi conversion\n\n## Code After:\npackage com.googlecode.pngtastic.core;\n\nimport java.io.ByteArrayOutputStream;\nimport java.io.IOException;\n\n/**\n * Usage:\n * \n * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk);\n * final File exported = image.export(toDir + \"/name.png\", bytes);\n * \n *\n * @author ray\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\tpublic byte[] insert(PngImage image, PngChunk chunk) throws IOException {\n\t\t// add it after the header chunk\n\t\timage.getChunks().add(1, chunk);\n\n\t\tfinal ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();\n\t\timage.writeDataOutputStream(outputBytes);\n\n\t\treturn outputBytes.toByteArray();\n\t}\n}\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n */\npublic class PngChunkInserter {\n\n\t/**\n\t * Conversion note: one inch is equal to exactly 0.0254 meters.\n\t * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 }\n\t * http://comments.gmane.org/gmane.comp.graphics.png.general/2425\n\t */\n\tprivate static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 };\n\n\tpublic static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300);\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3728,"cells":{"commit":{"kind":"string","value":"99c06cff63c4dc661b5cffc5a80f79327269684a"},"old_file":{"kind":"string","value":"c/anagrams.c"},"new_file":{"kind":"string","value":"c/anagrams.c"},"old_contents":{"kind":"string","value":"\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Need exactly one argument!\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"new_contents":{"kind":"string","value":"\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"subject":{"kind":"string","value":"Make the error message consistent with all of the other languages"},"message":{"kind":"string","value":"Make the error message consistent with all of the other languages\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Need exactly one argument!\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n\n## Instruction:\nMake the error message consistent with all of the other languages\n\n## Code After:\n\nvoid swap(char *i, char *j) {\n char saved = *i;\n *i = *j;\n *j = saved;\n}\n\nvoid generate_permutations(char* a, int n) {\n if (n == 0) {\n printf(\"%s\\n\", a);\n } else {\n for (int i = 0; i < n; i++) {\n generate_permutations(a, n-1);\n swap(&a[n % 2 == 0 ? 
0 : i], &a[n]);\n }\n generate_permutations(a, n-1);\n }\n}\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n size_t len = strlen(argv[1]);\n char *word = malloc(len + 1);\n word = strncpy(word, argv[1], len);\n\n generate_permutations(word, len-1);\n\n free(word);\n return 0;\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nint main(int argc, const char* argv[]) {\n\n if (argc != 2) {\n fprintf(stderr, \"Exactly one argument is required\\n\");\n return 1;\n }\n\n\n\n ... "}}},{"rowIdx":3729,"cells":{"commit":{"kind":"string","value":"377ff1dabc595fab4af9cb35bb8c071f2872c612"},"old_file":{"kind":"string","value":"agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java"},"new_file":{"kind":"string","value":"agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java"},"old_contents":{"kind":"string","value":"package iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ERROR\n}\n"},"new_contents":{"kind":"string","value":"package iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n"},"subject":{"kind":"string","value":"Add more device status type"},"message":{"kind":"string","value":"Add more device status type\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"epl-1.0"},"repos":{"kind":"string","value":"muka/agile-api-spec,muka/agile-api-spec,muka/agile-api-spec"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ERROR\n}\n\n## Instruction:\nAdd more device status type\n\n## Code After:\npackage iot.agile.object;\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npublic enum DeviceStatusType {\n \t CONNECTED,\n\t DISCONNECTED,\n\t ON,\n\t 0FF,\n\t ERROR\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3730,"cells":{"commit":{"kind":"string","value":"243adb38e3d4f61404f4df14a9a5aa18af8638d9"},"old_file":{"kind":"string","value":"app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java"},"new_file":{"kind":"string","value":"app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java"},"old_contents":{"kind":"string","value":"package de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.IOException;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n DexFile dexFile = new DexFile(app.sourceDir());\n List classNames = Collections.list(dexFile.entries());\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if(currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n"},"new_contents":{"kind":"string","value":"package de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n"},"subject":{"kind":"string","value":"Fix not existing .apks failing"},"message":{"kind":"string","value":"Fix not existing .apks failing\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"philipphager/disclosure-android-app"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport 
java.io.IOException;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n DexFile dexFile = new DexFile(app.sourceDir());\n List classNames = Collections.list(dexFile.entries());\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if(currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n\n## Instruction:\nFix not existing .apks failing\n\n## Code After:\npackage de.philipphager.disclosure.feature.analyser.app;\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\nimport timber.log.Timber;\n\npublic class Apk {\n private static final int MIN_INDEX = 0;\n private final App app;\n private List sortedClassNames;\n\n public Apk(App app) throws IOException {\n this.app = app;\n load();\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n .toSortedList()\n .toBlocking()\n .first();\n }\n\n public boolean containsPackage(String packageName) {\n String currentThread = Thread.currentThread().getName();\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n });\n return index >= MIN_INDEX;\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\nimport dalvik.system.DexFile;\nimport de.philipphager.disclosure.database.app.model.App;\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Collections;\nimport java.util.List;\nimport rx.Observable;\n\n\n# ... modified code ... \n\n\n }\n\n private void load() throws IOException {\n File file = new File(app.sourceDir());\n List classNames = new ArrayList<>();\n\n if (file.exists()) {\n DexFile dexFile = new DexFile(app.sourceDir());\n classNames.addAll(Collections.list(dexFile.entries()));\n }\n\n sortedClassNames = Observable.from(classNames)\n .distinct()\n\n\n ... \n\n\n Timber.d(\"%s : Searching for package %s in app %s\", currentThread, packageName, app.label());\n\n int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> {\n if (currentItem.startsWith(key)) {\n return 0;\n }\n return currentItem.compareTo(key);\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3731,"cells":{"commit":{"kind":"string","value":"03cab833e3d1a4117adc763620f7a1c78ee2cc95"},"old_file":{"kind":"string","value":"shim-server/src/test/java/org/openmhealth/shim/common/mapper/DataPointMapperUnitTests.java"},"new_file":{"kind":"string","value":"shim-server/src/test/java/org/openmhealth/shim/common/mapper/DataPointMapperUnitTests.java"},"old_contents":{"kind":"string","value":"package org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n}\n"},"new_contents":{"kind":"string","value":"package org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as a JSON node.\", classPathResourceName), e);\n }\n }\n}\n"},"subject":{"kind":"string","value":"Add unit test support to load class path resources as JSON nodes"},"message":{"kind":"string","value":"Add unit test support to load class path resources as JSON nodes\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"openmhealth/shimmer,openmhealth/shimmer,openmhealth/shimmer,openmhealth/shimmer"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.ObjectMapper;\n\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n}\n\n## Instruction:\nAdd unit test support to load class path resources as JSON nodes\n\n## Code After:\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n/**\n * @author Emerson Farrugia\n */\npublic abstract class DataPointMapperUnitTests {\n\n protected static final 
ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as a JSON node.\", classPathResourceName), e);\n }\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npackage org.openmhealth.shim.common.mapper;\n\nimport com.fasterxml.jackson.databind.JsonNode;\nimport com.fasterxml.jackson.databind.ObjectMapper;\nimport org.springframework.core.io.ClassPathResource;\n\nimport java.io.IOException;\nimport java.io.InputStream;\n\nimport static java.lang.String.format;\nimport static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper;\n\n\n\n\n# ... modified code ... \n\n\npublic abstract class DataPointMapperUnitTests {\n\n protected static final ObjectMapper objectMapper = newObjectMapper();\n\n\n /**\n * @param classPathResourceName the name of the class path resource to load\n * @return the contents of the resource as a {@link JsonNode}\n * @throws RuntimeException if the resource can't be loaded\n */\n protected JsonNode asJsonNode(String classPathResourceName) {\n\n ClassPathResource resource = new ClassPathResource(classPathResourceName);\n\n try {\n InputStream resourceInputStream = resource.getInputStream();\n return objectMapper.readTree(resourceInputStream);\n }\n catch (IOException e) {\n throw new RuntimeException(\n format(\"The class path resource '%s' can't be loaded as a JSON node.\", classPathResourceName), e);\n }\n }\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3732,"cells":{"commit":{"kind":"string","value":"50089e4a1d55414e0ae88b1699eeca0980bcfc15"},"old_file":{"kind":"string","value":"org.metaborg.meta.lang.dynsem.interpreter/src/main/java/org/metaborg/meta/lang/dynsem/interpreter/nodes/matching/LiteralMatchPattern.java"},"new_file":{"kind":"string","value":"org.metaborg.meta.lang.dynsem.interpreter/src/main/java/org/metaborg/meta/lang/dynsem/interpreter/nodes/matching/LiteralMatchPattern.java"},"old_contents":{"kind":"string","value":"package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif(Tools.hasConstructor(t, \"True\",0)){\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"False\",0)){\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"Int\", 1)){\n\t\t\treturn new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"String\", 1)){\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\t\t\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"new_contents":{"kind":"string","value":"package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"subject":{"kind":"string","value":"Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings."},"message":{"kind":"string","value":"Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as 
strings.\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"metaborg/dynsem,metaborg/dynsem"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif(Tools.hasConstructor(t, \"True\",0)){\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"False\",0)){\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"Int\", 1)){\n\t\t\treturn new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source);\n\t\t}\n\t\tif(Tools.hasConstructor(t, \"String\", 1)){\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\t\t\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n\n## Instruction:\nFix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.\n\n## Code After:\npackage org.metaborg.meta.lang.dynsem.interpreter.nodes.matching;\n\nimport org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil;\nimport org.spoofax.interpreter.core.Tools;\nimport org.spoofax.interpreter.terms.IStrategoAppl;\nimport org.spoofax.terms.util.NotImplementedException;\n\nimport com.oracle.truffle.api.frame.FrameDescriptor;\nimport com.oracle.truffle.api.source.SourceSection;\n\npublic abstract class LiteralMatchPattern extends MatchPattern {\n\n\tpublic LiteralMatchPattern(SourceSection source) {\n\t\tsuper(source);\n\t}\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\n\n\tpublic static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) {\n\t\tSourceSection source = SourceSectionUtil.fromStrategoTerm(t);\n\t\tif (Tools.hasConstructor(t, \"True\", 0)) {\n\t\t\treturn new TrueLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"False\", 0)) {\n\t\t\treturn new FalseLiteralTermMatchPattern(source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"Int\", 1)) {\n\t\t\treturn new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source);\n\t\t}\n\t\tif (Tools.hasConstructor(t, \"String\", 1)) {\n\t\t\treturn new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source);\n\t\t}\n\n\t\tthrow new NotImplementedException(\"Unsupported literal: \" + t);\n\t}\n}\n\n\n# ... rest of the code ..."}}},{"rowIdx":3733,"cells":{"commit":{"kind":"string","value":"2ba5f562edb568653574d329a9f1ffbe8b15e7c5"},"old_file":{"kind":"string","value":"tests/test_caching.py"},"new_file":{"kind":"string","value":"tests/test_caching.py"},"old_contents":{"kind":"string","value":"import os\nimport tempfile\n\nfrom . import RTRSSTestCase\nfrom rtrss import caching, config\n\n\nclass CachingTestCase(RTRSSTestCase):\n def setUp(self):\n fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"new_contents":{"kind":"string","value":"import os\nimport tempfile\n\nfrom . import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"subject":{"kind":"string","value":"Update test case to use new base class"},"message":{"kind":"string","value":"Update test case to use new base class\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport tempfile\n\nfrom . 
import RTRSSTestCase\nfrom rtrss import caching, config\n\n\nclass CachingTestCase(RTRSSTestCase):\n def setUp(self):\n fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n\n## Instruction:\nUpdate test case to use new base class\n\n## Code After:\nimport os\nimport tempfile\n\nfrom . import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename) as f:\n data = f.read()\n self.assertEqual(test_data, data)\n\n def test_atomic_write_really_atomic(self):\n test_data = 'test'\n\n with caching.open_for_atomic_write(self.filename) as f:\n f.write(test_data)\n with open(self.filename, 'w') as f1:\n f1.write('this will be overwritten')\n\n with open(self.filename) as f:\n data = f.read()\n\n self.assertEqual(test_data, data)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport os\nimport tempfile\n\nfrom . import TempDirTestCase\nfrom rtrss import caching\n\n\nclass CachingTestCase(TempDirTestCase):\n def setUp(self):\n super(CachingTestCase, self).setUp()\n fh, self.filename = tempfile.mkstemp(dir=self.dir.path)\n os.close(fh)\n\n def tearDown(self):\n os.remove(self.filename)\n super(CachingTestCase, self).tearDown()\n\n def test_open_for_atomic_write_writes(self):\n test_data = 'test'\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3734,"cells":{"commit":{"kind":"string","value":"61b5bc8a7e81225a83d195e016bc4adbd7ca1db5"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"from setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"new_contents":{"kind":"string","value":"from setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"subject":{"kind":"string","value":"Add Python 2.6 to classifiers"},"message":{"kind":"string","value":"Add Python 2.6 to classifiers\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"paltman/pymediainfo,paltman-archive/pymediainfo"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n\n## Instruction:\nAdd Python 2.6 to classifiers\n\n## Code After:\nfrom setuptools import setup, find_packages\n\nsetup(\n name='pymediainfo',\n version='2.1.5',\n author='Louis Sautier',\n author_email='sautier.louis@gmail.com',\n 
url='https://github.com/sbraz/pymediainfo',\n description=\"\"\"A Python wrapper for the mediainfo library.\"\"\",\n packages=find_packages(),\n namespace_packages=[],\n include_package_data=True,\n zip_safe=False,\n license='MIT',\n tests_require=[\"nose\"],\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n \"Operating System :: POSIX :: Linux\",\n \"Operating System :: MacOS :: MacOS X\",\n \"Operating System :: Microsoft :: Windows\",\n \"License :: OSI Approved :: MIT License\",\n ]\n)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n test_suite=\"nose.collector\",\n classifiers=[\n \"Development Status :: 5 - Production/Stable\",\n \"Programming Language :: Python :: 2.6\",\n \"Programming Language :: Python :: 2.7\",\n \"Programming Language :: Python :: 3.4\",\n \"Programming Language :: Python :: 3.5\",\n\n\n// ... rest of the code ..."}}},{"rowIdx":3735,"cells":{"commit":{"kind":"string","value":"94996a2a78743020945bf708616e202e7b988173"},"old_file":{"kind":"string","value":"aconite-core/src/io/aconite/serializers/GsonBodySerializer.kt"},"new_file":{"kind":"string","value":"aconite-core/src/io/aconite/serializers/GsonBodySerializer.kt"},"old_contents":{"kind":"string","value":"package io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.BadRequestException\nimport io.aconite.UnsupportedMediaTypeException\nimport io.aconite.BodyBuffer\nimport io.aconite.BodySerializer\nimport io.aconite.Buffer\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. ${ex.message}\")\n }\n }\n}"},"new_contents":{"kind":"string","value":"package io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) 
= BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. ${ex.message}\")\n }\n }\n}"},"subject":{"kind":"string","value":"Add support for empty body without contentType specification"},"message":{"kind":"string","value":"Add support for empty body without contentType specification\n"},"lang":{"kind":"string","value":"Kotlin"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"AcapellaSoft/Aconite,AcapellaSoft/Aconite"},"config":{"kind":"string","value":"kotlin"},"content":{"kind":"string","value":"## Code Before:\npackage io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.BadRequestException\nimport io.aconite.UnsupportedMediaTypeException\nimport io.aconite.BodyBuffer\nimport io.aconite.BodySerializer\nimport io.aconite.Buffer\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. ${ex.message}\")\n }\n }\n}\n## Instruction:\nAdd support for empty body without contentType specification\n\n## Code After:\npackage io.aconite.serializers\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\nimport kotlin.reflect.KType\n\nclass GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer {\n\n class Factory(val gson: Gson = Gson()): BodySerializer.Factory {\n constructor(builder: GsonBuilder): this(builder.create())\n override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType())\n }\n\n override fun serialize(obj: Any?) = BodyBuffer(\n content = Buffer.wrap(gson.toJson(obj, type)),\n contentType = \"application/json\"\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n throw BadRequestException(\"Bad JSON format. 
${ex.message}\")\n }\n }\n}"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport com.google.gson.JsonParseException\nimport io.aconite.*\nimport io.aconite.utils.toJavaType\nimport java.lang.reflect.Type\nimport kotlin.reflect.KAnnotatedElement\n\n\n// ... modified code ... \n\n\n )\n\n override fun deserialize(body: BodyBuffer): Any? {\n if (body.content.bytes.isEmpty()) return null\n\n if (body.contentType.toLowerCase() != \"application/json\")\n throw UnsupportedMediaTypeException(\"Only 'application/json' media type supported\")\n\n try {\n return gson.fromJson(body.content.string, type)\n } catch (ex: JsonParseException) {\n\n\n// ... rest of the code ..."}}},{"rowIdx":3736,"cells":{"commit":{"kind":"string","value":"2d64c01daebd918c3e6196b1eb3ad62f105c56e0"},"old_file":{"kind":"string","value":"django_google_charts/charts.py"},"new_file":{"kind":"string","value":"django_google_charts/charts.py"},"old_contents":{"kind":"string","value":"import six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"
\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )"},"new_contents":{"kind":"string","value":"import six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"
\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )"},"subject":{"kind":"string","value":"Make this Python 2.x compatible"},"message":{"kind":"string","value":"Make this Python 2.x compatible\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"danpalmer/django-google-charts,danpalmer/django-google-charts"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"
\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )\n## Instruction:\nMake this Python 2.x compatible\n\n## Code After:\nimport six\nimport json\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\nclass ChartMeta(type):\n def __new__(cls, name, bases, attrs):\n klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs)\n\n if klass.chart_slug:\n CHARTS[klass.chart_slug] = klass\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n columns = None\n\n def get_data(self):\n raise NotImplementedError\n\n def __str__(self):\n return format_html(\n \"
\",\n json.dumps(self.options),\n reverse(\n 'djgc-chart-data',\n args=(self.chart_slug,),\n ),\n )"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nfrom django.core.urlresolvers import reverse\nfrom django.utils.html import format_html, mark_safe\nfrom django.utils.encoding import python_2_unicode_compatible\n\nCHARTS = {}\n\n\n\n// ... modified code ... \n\n\n return klass\n\n@six.add_metaclass(ChartMeta)\n@python_2_unicode_compatible\nclass Chart(object):\n options = {}\n chart_slug = None\n\n\n// ... rest of the code ..."}}},{"rowIdx":3737,"cells":{"commit":{"kind":"string","value":"a385490e82e3ac3f909fe2b407e692206212748b"},"old_file":{"kind":"string","value":"main/src/main/java/com/bloatit/framework/Offer.java"},"new_file":{"kind":"string","value":"main/src/main/java/com/bloatit/framework/Offer.java"},"old_contents":{"kind":"string","value":"package com.bloatit.framework;\n\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n"},"new_contents":{"kind":"string","value":"package com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n"},"subject":{"kind":"string","value":"Add get amount method 
in offer"},"message":{"kind":"string","value":"Add get amount method in offer\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit,BloatIt/bloatit"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.bloatit.framework;\n\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n\n## Instruction:\nAdd get amount method in offer\n\n## Code After:\npackage com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\nimport com.bloatit.framework.right.RightManager.Action;\nimport com.bloatit.model.data.DaoComment;\nimport com.bloatit.model.data.DaoKudosable;\nimport com.bloatit.model.data.DaoOffer;\n\npublic final class Offer extends Kudosable {\n\n private final DaoOffer dao;\n\n public static Offer create(final DaoOffer dao) {\n if (dao == null) {\n return null;\n }\n return new Offer(dao);\n }\n \n public Offer(final DaoOffer dao) {\n super();\n this.dao = dao;\n }\n\n public DaoOffer getDao() {\n return dao;\n }\n\n public Date getDateExpire() {\n return dao.getDateExpire();\n }\n\n public boolean canSetdatExpire() {\n return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE);\n }\n\n public void setDateExpire(final Date dateExpire) {\n new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE);\n dao.setDateExpire(dateExpire);\n }\n\n public Demand getDemand() {\n return Demand.create(dao.getDemand());\n }\n\n public Description getDescription() {\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n }\n\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\npackage com.bloatit.framework;\n\nimport java.math.BigDecimal;\nimport java.util.Date;\n\nimport com.bloatit.framework.right.OfferRight;\n\n\n# ... modified code ... \n\n\n return new Description(dao.getDescription());\n }\n\n public BigDecimal getAmount() {\n return dao.getAmount();\n }\n \n @Override\n protected DaoKudosable getDaoKudosable() {\n return dao;\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3738,"cells":{"commit":{"kind":"string","value":"4a98d2ce95d6a082588e4ccc8e04454c26260ca0"},"old_file":{"kind":"string","value":"helpers.py"},"new_file":{"kind":"string","value":"helpers.py"},"old_contents":{"kind":"string","value":"def get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"new_contents":{"kind":"string","value":"def get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"subject":{"kind":"string","value":"Make get_readable_list process tuples, too"},"message":{"kind":"string","value":"Make get_readable_list process tuples, too\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\telif 
isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n\n## Instruction:\nMake get_readable_list process tuples, too\n\n## Code After:\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\t\t\telse:\n\t\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\treturn output + end\n\n\ndef get_list_as_english(passed_list):\n\toutput = \"\"\n\tfor i, item in enumerate(passed_list):\n\t\tif len(passed_list) is 1:\n\t\t\toutput += str(item) + ' '\n\n\t\telif len(passed_list) is 2:\n\t\t\toutput += str(item)\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += \" and \"\n\t\t\telse:\n\t\t\t\toutput += \"\"\n\n\t\telse:\n\t\t\tif i is not (len(passed_list) - 1):\n\t\t\t\toutput += str(item) + \", \"\n\t\t\telse:\n\t\t\t\toutput += \"and \" + str(item) + \", \"\n\treturn output\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\ndef get_readable_list(passed_list, sep=', ', end=''):\n\toutput = \"\"\n\tif isinstance(passed_list, list) or isinstance(passed_list, tuple):\n\t\tfor i, item in enumerate(passed_list):\n\t\t\tif len(passed_list) is 1:\n\t\t\t\toutput += str(item)\n\n\n// ... modified code ... \n\n\n\t\t\t\t\toutput += str(item) + sep\n\t\t\t\telse:\n\t\t\t\t\toutput += str(item)\n\n\n\telif isinstance(passed_list, dict):\n\t\tfor i, item in enumerate(passed_list.values()):\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3739,"cells":{"commit":{"kind":"string","value":"7627b8759ab08df562048ec1fa94fe9d69d01374"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict==1.1')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"new_contents":{"kind":"string","value":"\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict>=1.1')\n\ntry:\n import importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"subject":{"kind":"string","value":"Add importlib if not included"},"message":{"kind":"string","value":"Add importlib if not included\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"tadpol/exoline,azdle/exoline,asolz/exoline,danslimmon/exoline,tadpol/exoline,asolz/exoline,azdle/exoline,danslimmon/exoline"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict==1.1')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n\n## Instruction:\nAdd importlib if not included\n\n## Code After:\n\nfrom setuptools import setup\nfrom exoline import __version__ as version\n\nwith open('requirements.txt') as f:\n required = f.read().splitlines()\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict>=1.1')\n\ntry:\n import 
importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n version=version,\n url = 'http://github.com/dweaver/exoline',\n author = 'Dan Weaver',\n author_email = 'danweaver@exosite.com',\n description = 'Command line interface for Exosite platform.',\n long_description = open('README.md').read() + '\\n\\n' +\n open('HISTORY.md').read(),\n packages=['exoline'],\n package_dir={'exoline': 'exoline'},\n scripts=['bin/exo', 'bin/exoline'],\n keywords=['exosite', 'onep', 'one platform', 'm2m'],\n install_requires=required,\n zip_safe=False,\n )\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\ntry:\n from collections import OrderedDict\nexcept ImportError:\n required.append('ordereddict>=1.1')\n\ntry:\n import importlib\nexcept ImportError:\n required.append('importlib>=1.0.2')\n\nsetup(\n name='exoline',\n\n\n# ... rest of the code ..."}}},{"rowIdx":3740,"cells":{"commit":{"kind":"string","value":"d9fc83ec526df1bf732d8f65f445f48f1b764dfe"},"old_file":{"kind":"string","value":"selvbetjening/api/rest/models.py"},"new_file":{"kind":"string","value":"selvbetjening/api/rest/models.py"},"old_contents":{"kind":"string","value":"\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1)\n"},"new_contents":{"kind":"string","value":"\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n"},"subject":{"kind":"string","value":"Fix mistake returning the wrong authenticated user"},"message":{"kind":"string","value":"Fix 
mistake returning the wrong authenticated user\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1)\n\n## Instruction:\nFix mistake returning the wrong authenticated user\n\n## Code After:\n\nfrom tastypie.authentication import Authentication\nfrom tastypie.resources import ModelResource\n\nfrom provider.oauth2.models import AccessToken\n\nfrom selvbetjening.core.members.models import SUser\n\n\nclass OAuth2Authentication(Authentication):\n\n def is_authenticated(self, request, **kwargs):\n\n access_key = request.REQUEST.get('access_key', None)\n\n if not access_key:\n auth_header_value = request.META.get('HTTP_AUTHORIZATION', None)\n if auth_header_value:\n access_key = auth_header_value.split(' ')[1]\n\n if not access_key:\n return False\n\n try:\n token = AccessToken.objects.get_token(access_key)\n except AccessToken.DoesNotExist:\n return False\n\n request.user = token.user\n return True\n\n\nclass AuthenticatedUserResource(ModelResource):\n class Meta:\n queryset = SUser.objects.all()\n resource_name = 'authenticated_user'\n allowed_methods = ['get']\n\n excludes = ['password']\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n authentication = OAuth2Authentication()\n\n def get_object_list(self, request):\n return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3741,"cells":{"commit":{"kind":"string","value":"ec439270a5460a6e1b649f7ceaca35b8d8827407"},"old_file":{"kind":"string","value":"src/main/java/org/jusecase/properties/ui/KeyListCellRenderer.java"},"new_file":{"kind":"string","value":"src/main/java/org/jusecase/properties/ui/KeyListCellRenderer.java"},"old_contents":{"kind":"string","value":"package org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230));\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230));\n }\n\n @Override\n public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"new_contents":{"kind":"string","value":"package org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"subject":{"kind":"string","value":"Make sparse / complete more distinguishable"},"message":{"kind":"string","value":"Make sparse / complete more distinguishable\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"casid/jusecase-properties-editor"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230));\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230));\n }\n\n @Override\n public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel 
label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n\n## Instruction:\nMake sparse / complete more distinguishable\n\n## Code After:\npackage org.jusecase.properties.ui;\n\nimport org.jusecase.properties.entities.Key;\nimport org.jusecase.properties.entities.KeyPopulation;\n\nimport javax.swing.*;\nimport java.awt.*;\nimport java.util.HashMap;\nimport java.util.Map;\n\npublic class KeyListCellRenderer extends DefaultListCellRenderer {\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) {\n Key key = (Key) value;\n JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);\n\n if (!isSelected) {\n Color color = backgroundColorForPopulation.get(key.getPopulation());\n if (color != null) {\n label.setBackground(color);\n }\n }\n\n return label;\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n Map backgroundColorForPopulation = new HashMap<>();\n\n public KeyListCellRenderer() {\n backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186));\n }\n\n @Override\n\n\n# ... rest of the code ..."}}},{"rowIdx":3742,"cells":{"commit":{"kind":"string","value":"ee32b2e48acd47f1f1ff96482abf20f3d1818fc4"},"old_file":{"kind":"string","value":"tests/__init__.py"},"new_file":{"kind":"string","value":"tests/__init__.py"},"old_contents":{"kind":"string","value":"import sys\nimport unittest\n\nsys.path.append(\"../pythainlp\")\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"new_contents":{"kind":"string","value":"import sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"subject":{"kind":"string","value":"Add load wordnet to tests"},"message":{"kind":"string","value":"Add load wordnet to tests\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"PyThaiNLP/pythainlp"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport sys\nimport unittest\n\nsys.path.append(\"../pythainlp\")\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n\n## Instruction:\nAdd load wordnet to tests\n\n## Code After:\nimport sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\ntestRunner = unittest.TextTestRunner(verbosity=1)\ntestRunner.run(testSuite)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\nimport sys\nimport unittest\nimport nltk\n\nsys.path.append(\"../pythainlp\")\n\nnltk.download('omw-1.4') # load wordnet\n\nloader = unittest.TestLoader()\ntestSuite = loader.discover(\"tests\")\n\n\n# ... rest of the code ..."}}},{"rowIdx":3743,"cells":{"commit":{"kind":"string","value":"21fdc8c2c20a75b5431a2fb549fd9632a27ab410"},"old_file":{"kind":"string","value":"src/main/java/org/apacheextras/camel/examples/rcode/RCodeRunner.java"},"new_file":{"kind":"string","value":"src/main/java/org/apacheextras/camel/examples/rcode/RCodeRunner.java"},"old_contents":{"kind":"string","value":"package org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\n\nimport java.io.Console;\nimport java.io.File;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * @author Sebastian Rühl\n */\npublic class RCodeRunner {\n\n public static void main(String... args) throws Exception {\n CamelContext camelContext = new DefaultCamelContext();\n File basePath = args.length > 0 ? new File(args[0]) : new File(System.getProperty(\"user.home\") + \"/.rcode-example\");\n\n camelContext.addRoutes(new RCodeRouteBuilder(basePath));\n\n camelContext.start();\n Console console = System.console();\n if (console != null) {\n console.printf(\"Please press enter to shutdown route.\");\n console.readLine();\n } else {\n TimeUnit.SECONDS.sleep(5);\n }\n camelContext.stop();\n }\n}\n"},"new_contents":{"kind":"string","value":"/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... 
args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n"},"subject":{"kind":"string","value":"Revert \"migrate example to rcoderunner\""},"message":{"kind":"string","value":"Revert \"migrate example to rcoderunner\"\n\nThis reverts commit b03405d0726820a1f184e0e80892826d02994763."},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"sruehl/camel-example-rcode,sruehl/camel-example-rcode"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\n\nimport java.io.Console;\nimport java.io.File;\nimport java.util.concurrent.TimeUnit;\n\n/**\n * @author Sebastian Rühl\n */\npublic class RCodeRunner {\n\n public static void main(String... args) throws Exception {\n CamelContext camelContext = new DefaultCamelContext();\n File basePath = args.length > 0 ? new File(args[0]) : new File(System.getProperty(\"user.home\") + \"/.rcode-example\");\n\n camelContext.addRoutes(new RCodeRouteBuilder(basePath));\n\n camelContext.start();\n Console console = System.console();\n if (console != null) {\n console.printf(\"Please press enter to shutdown route.\");\n console.readLine();\n } else {\n TimeUnit.SECONDS.sleep(5);\n }\n camelContext.stop();\n }\n}\n\n## Instruction:\nRevert \"migrate example to rcoderunner\"\n\nThis reverts commit b03405d0726820a1f184e0e80892826d02994763.\n## Code After:\n/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... 
args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n/*\n * Copyright 2013 Apache Software Foundation.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\npackage org.apacheextras.camel.examples.rcode;\n\nimport org.apache.camel.CamelContext;\nimport org.apache.camel.impl.DefaultCamelContext;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport java.io.File;\n\n/**\n *\n * @author cemmersb\n */\npublic class RCodeRunner {\n\n private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class);\n private CamelContext camelContext;\n private RCodeRouteBuilder routeBuilder = null;\n\n public RCodeRunner() {\n try {\n initializeContext();\n } catch (Exception ex) {\n LOGGER.error(\"Unable to initialize context: {}\", ex.getMessage());\n }\n }\n\n private void initializeContext() throws Exception {\n routeBuilder = new RCodeRouteBuilder(new File(System.getProperty(\"user.dir\") + \"./rcode-example/data\"));\n camelContext = new DefaultCamelContext();\n camelContext.addRoutes(routeBuilder);\n camelContext.start();\n }\n\n @Override\n protected void finalize() throws Throwable {\n camelContext.stop();\n super.finalize();\n }\n\n public static void main(String... args) throws InterruptedException, Throwable {\n LOGGER.info(\"Starting RCodeRunner.\");\n RCodeRunner rCodeRunner = new RCodeRunner();\n Thread.sleep(1000);\n LOGGER.info(\"Stopping RCodeRunner.\");\n rCodeRunner.finalize();\n }\n}\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3744,"cells":{"commit":{"kind":"string","value":"9ceace60593f133b4f6dfdbd9b6f583362415294"},"old_file":{"kind":"string","value":"src/configuration.py"},"new_file":{"kind":"string","value":"src/configuration.py"},"old_contents":{"kind":"string","value":"import ConfigParser\nimport os\n\ndef class ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self)\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.'\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"new_contents":{"kind":"string","value":"import ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"subject":{"kind":"string","value":"Fix a few syntax errors"},"message":{"kind":"string","value":"Fix a few syntax errors\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport ConfigParser\nimport os\n\ndef class ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self)\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.'\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n 
(os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n\n## Instruction:\nFix a few syntax errors\n\n## Code After:\nimport ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n self.filename = os.environ[\"HOME\"]+'/.'+appname\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n elif os.name == 'nt':\n self.filename = (\"%s\\Application Data\\%s\" %\n (os.environ[\"HOMEPATH\"], appname))\n else:\n raise UnsupportedOSError(os.name)\n self.config = ConfigParser.ConfigParser()\n self.config.read(self.filename)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport ConfigParser\nimport os\n\nclass ConfigDlstats(object):\n \"\"\"Cross platform configuration file handler.\n\n This class manages dlstats configuration files, providing\n easy access to the options.\"\"\"\n\n def __init__(self):\n \"\"\"Open the configuration files handler, choosing the right\n path depending on the platform.\"\"\"\n appname = 'dlstats'\n if os.name == 'posix':\n if os.path.isfile(os.environ[\"HOME\"]+'/.'+appname):\n\n\n ... \n\n\n elif os.path.isfile('/etc/'+appname):\n self.filename = '/etc/'+appname\n else:\n raise FileNotFoundError('No configuration file found.')\n elif os.name == 'mac':\n self.filename = (\"%s/Library/Application Support/%s\" %\n (os.environ[\"HOME\"], appname))\n\n\n ... 
"}}},{"rowIdx":3745,"cells":{"commit":{"kind":"string","value":"721f6f7916d698f22c9d96ce52cce3773fa514cc"},"old_file":{"kind":"string","value":"uwsgiplugin.py"},"new_file":{"kind":"string","value":"uwsgiplugin.py"},"old_contents":{"kind":"string","value":"import os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/plugin.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\n\nif os.system(\"rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs\" % (base_path, base_path)) != 0:\n os._exit(1)\n"},"new_contents":{"kind":"string","value":"import os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n"},"subject":{"kind":"string","value":"Update script to build rust code via cargo"},"message":{"kind":"string","value":"Update script to build rust code via cargo\n\nSigned-off-by: Luca Bruno \n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"unbit/uwsgi-rust,unbit/uwsgi-rust,unbit/uwsgi-rust"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/plugin.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\n\nif os.system(\"rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs\" % (base_path, base_path)) != 0:\n os._exit(1)\n\n## Instruction:\nUpdate script to build rust code via cargo\n\nSigned-off-by: Luca Bruno \n\n## Code After:\nimport os\nimport os.path\nimport inspect\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nbase_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0])\n\nNAME = 'rust'\nGCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path]\n\nCFLAGS = []\n\n\n\n// ... modified code ... \n\n\nif os.uname()[0] == 'Darwin':\n CFLAGS.append('-mmacosx-version-min=10.7')\n\nif os.system(\"cargo build --release\") != 0:\n os._exit(1)\n\n# To also build the example app:\n#os.system(\"cargo build --release --manifest-path examples/Cargo.toml\")\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3746,"cells":{"commit":{"kind":"string","value":"59b6d5b7e4f337320ea12d381e9cad0aa9c9fa75"},"old_file":{"kind":"string","value":"tests/slice.c"},"new_file":{"kind":"string","value":"tests/slice.c"},"old_contents":{"kind":"string","value":"\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"new_contents":{"kind":"string","value":"\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"subject":{"kind":"string","value":"Fix 'unused variable' warning on fast build"},"message":{"kind":"string","value":"Fix 'unused variable' warning on fast build\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"mcinglis/libmacro,mcinglis/libmacro,mcinglis/libmacro"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n\n## Instruction:\nFix 'unused variable' warning on fast build\n\n## Code After:\n\n\nint main( void )\n{\n int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };\n\n printf( \"Testing subset slice...\\n\" );\n int const ws[] = { SLICE( xs, 3, 4 ) };\n ASSERT( NELEM( ws ) == 4,\n ws[ 0 ] == xs[ 3 ],\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n ASSERT( NELEM( ys ) == 6,\n ys[ 0 ] == xs[ 0 ],\n ys[ 1 ] == xs[ 1 ],\n ys[ 2 ] == xs[ 2 ],\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n 
printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n ws[ 1 ] == xs[ 4 ],\n ws[ 2 ] == xs[ 5 ],\n ws[ 3 ] == xs[ 6 ] );\n ( void ) ws;\n\n printf( \"Testing total slice...\\n\" );\n int const ys[] = { SLICE( xs, 0, 6 ) };\n\n\n ... \n\n\n ys[ 3 ] == xs[ 3 ],\n ys[ 4 ] == xs[ 4 ],\n ys[ 5 ] == xs[ 5 ] );\n ( void ) ys;\n\n printf( \"Testing empty slice...\\n\" );\n int const zs[] = { 0, SLICE( xs, 2, 0 ) };\n ASSERT( NELEM( zs ) == 1 );\n ( void ) zs;\n\n printf( \"SLICE() tests passed.\\n\" );\n}\n\n\n ... "}}},{"rowIdx":3747,"cells":{"commit":{"kind":"string","value":"9a2cc99b068b2aaa572f52b4516852b239577c34"},"old_file":{"kind":"string","value":"dummyserver/server.py"},"new_file":{"kind":"string","value":"dummyserver/server.py"},"old_contents":{"kind":"string","value":"\nimport threading, socket\n\n\n\n\"\"\"\nDummy server using for unit testing\n\"\"\"\n\nclass Server(threading.Thread):\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"new_contents":{"kind":"string","value":"\nimport threading, socket\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"subject":{"kind":"string","value":"Put docstring inside Server class"},"message":{"kind":"string","value":"Put docstring inside Server class\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"psf/requests"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport threading, socket\n\n\n\n\"\"\"\nDummy server using for unit testing\n\"\"\"\n\nclass Server(threading.Thread):\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n 
self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n\n## Instruction:\nPut docstring inside Server class\n\n## Code After:\n\nimport threading, socket\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n self.host = host\n self.port = port\n self.ready_event = threading.Event()\n self.stop_event = threading.Event()\n\n def run(self):\n sock = socket.socket()\n sock.bind((self.host, self.port))\n sock.listen(0)\n self.ready_event.set()\n self.handler(sock)\n self.stop_event.set()\n sock.close()\n\n def __enter__(self):\n self.start()\n self.ready_event.wait()\n return self.host, self.port\n \n def __exit__(self, exc_type, exc_value, traceback):\n if exc_type is None:\n self.stop_event.wait()\n return False # allow exceptions to propagate \n \n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n\n\n\nclass Server(threading.Thread):\n \"\"\" Dummy server using for unit testing \"\"\"\n\n def __init__(self, handler, host='localhost', port=8021):\n threading.Thread.__init__(self)\n self.handler = handler\n\n\n# ... rest of the code ..."}}},{"rowIdx":3748,"cells":{"commit":{"kind":"string","value":"44d103359cff312865f409ff34f528f63e441ef4"},"old_file":{"kind":"string","value":"graphapi/views.py"},"new_file":{"kind":"string","value":"graphapi/views.py"},"old_contents":{"kind":"string","value":"from simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n # if not show_graphiql:\n # error = verify_request(request, 'graphapi')\n # if error:\n # print('graphapi/views: get_response bailed ')\n # return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"new_contents":{"kind":"string","value":"from simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"subject":{"kind":"string","value":"Revert \"Reimplement using explicit variable lookup\""},"message":{"kind":"string","value":"Revert \"Reimplement using explicit variable lookup\"\n\nThis reverts commit 
94683e6c\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n # if not show_graphiql:\n # error = verify_request(request, 'graphapi')\n # if error:\n # print('graphapi/views: get_response bailed ')\n # return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n\n## Instruction:\nRevert \"Reimplement using explicit variable lookup\"\n\nThis reverts commit 94683e6c\n\n## Code After:\nfrom simplekeys.verifier import verify_request\nfrom graphene_django.views import GraphQLView\nfrom django.conf import settings\n\n\nclass KeyedGraphQLView(GraphQLView):\n graphiql_template = \"graphene/graphiql-keyed.html\"\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n def render_graphiql(self, request, **data):\n data['demo_key'] = settings.GRAPHQL_DEMO_KEY\n return super().render_graphiql(request, **data)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n def get_response(self, request, data, show_graphiql=False):\n # check key only if we're not handling a graphiql request\n if not show_graphiql:\n error = verify_request(request, 'graphapi')\n if error:\n return error, error.status_code\n\n return super().get_response(request, data, show_graphiql)\n\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3749,"cells":{"commit":{"kind":"string","value":"8e622ec1c83b064ed3d47d41dc49b179dc9a9f54"},"old_file":{"kind":"string","value":"app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java"},"new_file":{"kind":"string","value":"app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java"},"old_contents":{"kind":"string","value":"package coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View rootView;\n if (convertView == null) {\n rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n } else {\n rootView = convertView;\n }\n final Movie movie = getItem(position);\n\n final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);\n titleView.setText(movie.getTitle());\n\n final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);\n releaseView.setText(String.valueOf(movie.getReleased()));\n\n return rootView;\n }\n}\n"},"new_contents":{"kind":"string","value":"package coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) itemView.findViewById(R.id.movie_released);\n }\n }\n}\n"},"subject":{"kind":"string","value":"Introduce ViewHolder pattern into ArrayAdapter"},"message":{"kind":"string","value":"Introduce ViewHolder pattern into ArrayAdapter\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"jarst/PopMovies"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport 
android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View rootView;\n if (convertView == null) {\n rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n } else {\n rootView = convertView;\n }\n final Movie movie = getItem(position);\n\n final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title);\n titleView.setText(movie.getTitle());\n\n final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released);\n releaseView.setText(String.valueOf(movie.getReleased()));\n\n return rootView;\n }\n}\n\n## Instruction:\nIntroduce ViewHolder pattern into ArrayAdapter\n\n## Code After:\npackage coderefactory.net.popmovies;\n\nimport android.app.Activity;\nimport android.support.annotation.NonNull;\nimport android.view.LayoutInflater;\nimport android.view.View;\nimport android.view.ViewGroup;\nimport android.widget.ArrayAdapter;\nimport android.widget.TextView;\n\nimport java.util.List;\n\npublic class MovieAdapter extends ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) itemView.findViewById(R.id.movie_released);\n }\n }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\npublic class MovieAdapter extends ArrayAdapter {\n\n private ViewHolder viewHolder;\n\n public MovieAdapter(final Activity context, final List movies) {\n super(context, 0, movies);\n }\n\n\n ... 
\n\n\n @NonNull\n @Override\n public View getView(final int position, final View convertView, final ViewGroup parent) {\n final View itemView;\n if (convertView == null) {\n itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false);\n viewHolder = new ViewHolder(itemView);\n itemView.setTag(viewHolder);\n } else {\n itemView = convertView;\n viewHolder = (ViewHolder) convertView.getTag();\n }\n\n populateView(position);\n\n return itemView;\n }\n\n private void populateView(final int position) {\n final Movie movie = getItem(position);\n\n viewHolder.titleView.setText(movie.getTitle());\n viewHolder.releaseView.setText(String.valueOf(movie.getReleased()));\n }\n\n private static class ViewHolder {\n private final TextView titleView;\n private final TextView releaseView;\n\n private ViewHolder(final View itemView) {\n titleView = (TextView) itemView.findViewById(R.id.movie_title);\n releaseView = (TextView) itemView.findViewById(R.id.movie_released);\n }\n }\n}\n\n\n ... "}}},{"rowIdx":3750,"cells":{"commit":{"kind":"string","value":"0261c895cb41f5caba42ae432b997fd3c941e96f"},"old_file":{"kind":"string","value":"tests.py"},"new_file":{"kind":"string","value":"tests.py"},"old_contents":{"kind":"string","value":"import pytest\nimport cleaner\n\nclass TestTagRemoval():\n def test_span_removal(self):\n text = ('This is some'\n ' dummy text lalalala This is some more dummy text '\n 'test')\n\n expected = ('This is some dummy text lalalala This is some more dummy '\n 'text test')\n\n cleaned = cleaner.remove_superflous_markup(text)\n\n assert cleaned == expected\n"},"new_contents":{"kind":"string","value":"import pytest\nimport cleaner\n\nclass TestTagTools():\n def test_get_pure_tag(self):\n tag1 = '
'\n tag2 = '
'\n tag3 = '
'\n\n        assert cleaner.get_pure_tag(tag1) == '
'\n assert cleaner.get_pure_tag(tag2) == '
'\n assert cleaner.get_pure_tag(tag3) == '
'\n"},"subject":{"kind":"string","value":"Add test for getting pure html tag"},"message":{"kind":"string","value":"Add test for getting pure html tag\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"jamalmoir/blogger_html_cleaner"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport pytest\nimport cleaner\n\nclass TestTagRemoval():\n    def test_span_removal(self):\n        text = ('This is some'\n        ' dummy text lalalala This is some more dummy text '\n        'test')\n\n        expected = ('This is some dummy text lalalala This is some more dummy '\n        'text test')\n\n        cleaned = cleaner.remove_superflous_markup(text)\n\n        assert cleaned == expected\n\n## Instruction:\nAdd test for getting pure html tag\n\n## Code After:\nimport pytest\nimport cleaner\n\nclass TestTagTools():\n    def test_get_pure_tag(self):\n        tag1 = '
'\n tag2 = '
'\n tag3 = '
'\n\n        assert cleaner.get_pure_tag(tag1) == '
'\n assert cleaner.get_pure_tag(tag2) == '
'\n assert cleaner.get_pure_tag(tag3) == '
'\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport pytest\nimport cleaner\n\nclass TestTagTools():\n    def test_get_pure_tag(self):\n        tag1 = '
'\n tag2 = '
'\n tag3 = '
'\n\n        assert cleaner.get_pure_tag(tag1) == '
'\n assert cleaner.get_pure_tag(tag2) == '
'\n assert cleaner.get_pure_tag(tag3) == '
'\n\n\n ... "}}},{"rowIdx":3751,"cells":{"commit":{"kind":"string","value":"fba4fdf426b0a29ca06deb67587c2bd804adb017"},"old_file":{"kind":"string","value":"tbgxmlutils/xmlutils.py"},"new_file":{"kind":"string","value":"tbgxmlutils/xmlutils.py"},"old_contents":{"kind":"string","value":"\nfrom xml.dom import minidom\nimport xml.etree.ElementTree as ET\nimport xmltodict\n\ndef add(k, parent=None, txt=None, attrs=None):\n  if parent is None:\n    handle = ET.Element(k)\n  else:\n    handle = ET.SubElement(parent, k)\n  if txt: handle.text = unicode(txt)\n  try:\n    for k, v in attrs.iteritems(): handle.attrib[k] = v\n  except AttributeError:\n    pass\n  return handle\n\ndef etree2xml(e, encoding='UTF-8'): \n  return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)\n\ndef pretty(xml=None, fn=None):\n  if fn is not None:\n    xml = minidom.parse(fn)\n  elif not isinstance(xml, minidom.Document):\n    xml = minidom.parseString(xml)\n  return xml.toprettyxml(indent='  ')\n\ndef xml_fn_to_json(fn):\n  fh = open(fn, 'r')\n  json = xmltodict.parse(fh.read())\n  return json\n\n"},"new_contents":{"kind":"string","value":"\nfrom xml.dom import minidom\nimport lxml.etree as ET\nimport xmltodict\n\ndef add(k, parent=None, txt=None, attrs=None):\n  if parent is None:\n    handle = ET.Element(k)\n  else:\n    handle = ET.SubElement(parent, k)\n  if txt: handle.text = unicode(txt)\n  try:\n    for k, v in attrs.iteritems(): handle.attrib[k] = v\n  except AttributeError:\n    pass\n  return handle\n\ndef etree2xml(e, encoding='UTF-8'): \n  return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)\n\ndef pretty(xml=None, fn=None):\n  if fn is not None:\n    xml = minidom.parse(fn)\n  elif not isinstance(xml, minidom.Document):\n    xml = minidom.parseString(xml)\n  return xml.toprettyxml(indent='  ')\n\ndef xml_fn_to_json(fn):\n  fh = open(fn, 'r')\n  json = xmltodict.parse(fh.read())\n  return json\n\n"},"subject":{"kind":"string","value":"Use lxml instead of elementtree."},"message":{"kind":"string","value":"Use lxml instead of elementtree.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"Schwarzschild/TBGXMLUtils"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom xml.dom import minidom\nimport xml.etree.ElementTree as ET\nimport xmltodict\n\ndef add(k, parent=None, txt=None, attrs=None):\n  if parent is None:\n    handle = ET.Element(k)\n  else:\n    handle = ET.SubElement(parent, k)\n  if txt: handle.text = unicode(txt)\n  try:\n    for k, v in attrs.iteritems(): handle.attrib[k] = v\n  except AttributeError:\n    pass\n  return handle\n\ndef etree2xml(e, encoding='UTF-8'): \n  return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)\n\ndef pretty(xml=None, fn=None):\n  if fn is not None:\n    xml = minidom.parse(fn)\n  elif not isinstance(xml, minidom.Document):\n    xml = minidom.parseString(xml)\n  return xml.toprettyxml(indent='  ')\n\ndef xml_fn_to_json(fn):\n  fh = open(fn, 'r')\n  json = xmltodict.parse(fh.read())\n  return json\n\n\n## Instruction:\nUse lxml instead of elementtree.\n\n## Code After:\n\nfrom xml.dom import minidom\nimport lxml.etree as ET\nimport xmltodict\n\ndef add(k, parent=None, txt=None, attrs=None):\n  if parent is None:\n    handle = ET.Element(k)\n  else:\n    handle = ET.SubElement(parent, k)\n  if txt: handle.text = unicode(txt)\n  try:\n    for k, v in attrs.iteritems(): handle.attrib[k] = v\n  
except AttributeError:\n    pass\n  return handle\n\ndef etree2xml(e, encoding='UTF-8'): \n  return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)\n\ndef pretty(xml=None, fn=None):\n  if fn is not None:\n    xml = minidom.parse(fn)\n  elif not isinstance(xml, minidom.Document):\n    xml = minidom.parseString(xml)\n  return xml.toprettyxml(indent='  ')\n\ndef xml_fn_to_json(fn):\n  fh = open(fn, 'r')\n  json = xmltodict.parse(fh.read())\n  return json\n\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nfrom xml.dom import minidom\nimport lxml.etree as ET\nimport xmltodict\n\ndef add(k, parent=None, txt=None, attrs=None):\n\n\n// ... rest of the code ..."}}},{"rowIdx":3752,"cells":{"commit":{"kind":"string","value":"6b15d7151f2703ce049ac9ab14b13c8b13122bf8"},"old_file":{"kind":"string","value":"http.h"},"new_file":{"kind":"string","value":"http.h"},"old_contents":{"kind":"string","value":"\ntypedef struct\n{\n    int method;     /* request method */\n    char* path;     /* requested path */\n    char* host;     /* hostname field */\n    char* type;     /* content-type */\n    size_t length;  /* content-length */\n    int flags;\n}\nhttp_request;\n\n/* Write an error page (and header). Returns number of bytes written. */\nsize_t gen_error_page( int fd, int error );\n\n/*\n    Write 200 Ok header with content length and content type.\n    Returns the number of bytes written, 0 on failure.\n */\nsize_t http_ok( int fd, const char* type, unsigned long size );\n\n/* parse a HTTP request, returns non-zero on success, zero on failure */\nint http_request_parse( char* buffer, http_request* request );\n\n#endif /* HTTP_H */\n\n"},"new_contents":{"kind":"string","value":"\ntypedef struct\n{\n    int method;     /* request method */\n    char* path;     /* requested path */\n    char* host;     /* hostname field */\n    char* type;     /* content-type */\n    size_t length;  /* content-length */\n}\nhttp_request;\n\n/* Write an error page (and header). Returns number of bytes written. */\nsize_t gen_error_page( int fd, int error );\n\n/*\n    Write 200 Ok header with content length and content type.\n    Returns the number of bytes written, 0 on failure.\n */\nsize_t http_ok( int fd, const char* type, unsigned long size );\n\n/* parse a HTTP request, returns non-zero on success, zero on failure */\nint http_request_parse( char* buffer, http_request* request );\n\n#endif /* HTTP_H */\n\n"},"subject":{"kind":"string","value":"Remove unused flags field from request"},"message":{"kind":"string","value":"Remove unused flags field from request\n\nSigned-off-by: David Oberhollenzer \n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"AgentD/websrv,AgentD/websrv,AgentD/websrv"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\ntypedef struct\n{\n    int method;     /* request method */\n    char* path;     /* requested path */\n    char* host;     /* hostname field */\n    char* type;     /* content-type */\n    size_t length;  /* content-length */\n    int flags;\n}\nhttp_request;\n\n/* Write an error page (and header). Returns number of bytes written. 
*/\nsize_t gen_error_page( int fd, int error );\n\n/*\n    Write 200 Ok header with content length and content type.\n    Returns the number of bytes written, 0 on failure.\n */\nsize_t http_ok( int fd, const char* type, unsigned long size );\n\n/* parse a HTTP request, returns non-zero on success, zero on failure */\nint http_request_parse( char* buffer, http_request* request );\n\n#endif /* HTTP_H */\n\n\n## Instruction:\nRemove unused flags field from request\n\nSigned-off-by: David Oberhollenzer \n\n## Code After:\n\ntypedef struct\n{\n    int method;     /* request method */\n    char* path;     /* requested path */\n    char* host;     /* hostname field */\n    char* type;     /* content-type */\n    size_t length;  /* content-length */\n}\nhttp_request;\n\n/* Write an error page (and header). Returns number of bytes written. */\nsize_t gen_error_page( int fd, int error );\n\n/*\n    Write 200 Ok header with content length and content type.\n    Returns the number of bytes written, 0 on failure.\n */\nsize_t http_ok( int fd, const char* type, unsigned long size );\n\n/* parse a HTTP request, returns non-zero on success, zero on failure */\nint http_request_parse( char* buffer, http_request* request );\n\n#endif /* HTTP_H */\n\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n    char* host;     /* hostname field */\n    char* type;     /* content-type */\n    size_t length;  /* content-length */\n}\nhttp_request;\n\n\n\n# ... rest of the code ..."}}},{"rowIdx":3753,"cells":{"commit":{"kind":"string","value":"ad0151eee0027237c8cdd433ef2f24bfa47af5df"},"old_file":{"kind":"string","value":"pyreaclib/nucdata/tests/test_binding.py"},"new_file":{"kind":"string","value":"pyreaclib/nucdata/tests/test_binding.py"},"old_contents":{"kind":"string","value":"\nimport os\n\nfrom pyreaclib.nucdata import BindingTable\n\nclass TestAME(object):\n    @classmethod\n    def setup_class(cls):\n        \"\"\" this is run once for each class before any tests \"\"\"\n        pass\n\n    @classmethod\n    def teardown_class(cls):\n        \"\"\" this is run once for each class after all tests \"\"\"\n        pass\n\n    def setup_method(self):\n        \"\"\" this is run before each test \"\"\"\n        self.bintable = BindingTable()\n\n    def teardown_method(self):\n        \"\"\" this is run after each test \"\"\"\n        self.bintable = None\n\n    def test_get(self):\n        nuc = self.bintable.get_nuclide(n=1, z=1)\n        assert nuc.z == 1\n        assert nuc.n == 1\n        assert nuc.nucbind == 1.112283\n\n"},"new_contents":{"kind":"string","value":"\nimport os\n\nfrom pyreaclib.nucdata import BindingTable\n\nclass TestAME(object):\n    @classmethod\n    def setup_class(cls):\n        \"\"\" this is run once for each class before any tests \"\"\"\n        pass\n\n    @classmethod\n    def teardown_class(cls):\n        \"\"\" this is run once for each class after all tests \"\"\"\n        pass\n\n    def setup_method(self):\n        \"\"\" this is run before each test \"\"\"\n        self.bintable = BindingTable()\n\n    def teardown_method(self):\n        \"\"\" this is run after each test \"\"\"\n        self.bintable = None\n\n    def test_get(self):\n        nuc = self.bintable.get_nuclide(n=1, z=1)\n        assert nuc.z == 1\n        assert nuc.n == 1\n        assert nuc.nucbind == 1.112283\n\n        nuc = self.bintable.get_nuclide(n=5, z=6)\n        assert nuc.z == 6\n        assert nuc.n == 5\n        assert nuc.nucbind == 6.676456\n\n        nuc = 
self.bintable.get_nuclide(n=17, z=23)\n        assert nuc.z == 23\n        assert nuc.n == 17\n        assert nuc.nucbind == 7.317\n\n        nuc = self.bintable.get_nuclide(n=90, z=78)\n        assert nuc.z == 78\n        assert nuc.n == 90\n        assert nuc.nucbind == 7.773605\n"},"subject":{"kind":"string","value":"Add some more binding energy table tests."},"message":{"kind":"string","value":"Add some more binding energy table tests.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"pyreaclib/pyreaclib"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport os\n\nfrom pyreaclib.nucdata import BindingTable\n\nclass TestAME(object):\n    @classmethod\n    def setup_class(cls):\n        \"\"\" this is run once for each class before any tests \"\"\"\n        pass\n\n    @classmethod\n    def teardown_class(cls):\n        \"\"\" this is run once for each class after all tests \"\"\"\n        pass\n\n    def setup_method(self):\n        \"\"\" this is run before each test \"\"\"\n        self.bintable = BindingTable()\n\n    def teardown_method(self):\n        \"\"\" this is run after each test \"\"\"\n        self.bintable = None\n\n    def test_get(self):\n        nuc = self.bintable.get_nuclide(n=1, z=1)\n        assert nuc.z == 1\n        assert nuc.n == 1\n        assert nuc.nucbind == 1.112283\n\n\n## Instruction:\nAdd some more binding energy table tests.\n\n## Code After:\n\nimport os\n\nfrom pyreaclib.nucdata import BindingTable\n\nclass TestAME(object):\n    @classmethod\n    def setup_class(cls):\n        \"\"\" this is run once for each class before any tests \"\"\"\n        pass\n\n    @classmethod\n    def teardown_class(cls):\n        \"\"\" this is run once for each class after all tests \"\"\"\n        pass\n\n    def setup_method(self):\n        \"\"\" this is run before each test \"\"\"\n        self.bintable = BindingTable()\n\n    def teardown_method(self):\n        \"\"\" this is run after each test \"\"\"\n        self.bintable = None\n\n    def test_get(self):\n        nuc = self.bintable.get_nuclide(n=1, z=1)\n        assert nuc.z == 1\n        assert nuc.n == 1\n        assert nuc.nucbind == 1.112283\n\n        nuc = self.bintable.get_nuclide(n=5, z=6)\n        assert nuc.z == 6\n        assert nuc.n == 5\n        assert nuc.nucbind == 6.676456\n\n        nuc = self.bintable.get_nuclide(n=17, z=23)\n        assert nuc.z == 23\n        assert nuc.n == 17\n        assert nuc.nucbind == 7.317\n\n        nuc = self.bintable.get_nuclide(n=90, z=78)\n        assert nuc.z == 78\n        assert nuc.n == 90\n        assert nuc.nucbind == 7.773605\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n        assert nuc.n == 1\n        assert nuc.nucbind == 1.112283\n\n        nuc = self.bintable.get_nuclide(n=5, z=6)\n        assert nuc.z == 6\n        assert nuc.n == 5\n        assert nuc.nucbind == 6.676456\n\n        nuc = self.bintable.get_nuclide(n=17, z=23)\n        assert nuc.z == 23\n        assert nuc.n == 17\n        assert nuc.nucbind == 7.317\n\n        nuc = self.bintable.get_nuclide(n=90, z=78)\n        assert nuc.z == 78\n        assert nuc.n == 90\n        assert nuc.nucbind == 7.773605\n\n\n ... 
"}}},{"rowIdx":3754,"cells":{"commit":{"kind":"string","value":"d6759d0abec637753d93cd407fad5e7abc6ec86d"},"old_file":{"kind":"string","value":"astropy/tests/plugins/display.py"},"new_file":{"kind":"string","value":"astropy/tests/plugins/display.py"},"old_contents":{"kind":"string","value":"\nimport warnings\nfrom astropy.utils.exceptions import AstropyDeprecationWarning\n\ntry:\n    from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS\nexcept ImportError:\n    PYTEST_HEADER_MODULES = {}\n    TESTED_VERSIONS = {}\n\nwarnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '\n              'See the pytest-astropy documentation for information on '\n              'migrating to using pytest-astropy to customize the pytest '\n              'header.', AstropyDeprecationWarning)\n"},"new_contents":{"kind":"string","value":"\nimport warnings\nfrom astropy.utils.exceptions import AstropyDeprecationWarning\n\ntry:\n    from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,\n                                               TESTED_VERSIONS)\nexcept ImportError:\n    PYTEST_HEADER_MODULES = {}\n    TESTED_VERSIONS = {}\n\nwarnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '\n              'See the pytest-astropy-header documentation for information on '\n              'migrating to using pytest-astropy-header to customize the '\n              'pytest header.', AstropyDeprecationWarning)\n"},"subject":{"kind":"string","value":"Fix typo in deprecation warning"},"message":{"kind":"string","value":"TST: Fix typo in deprecation warning [ci skip]\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"stargaser/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,lpsinger/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,larrybradley/astropy,lpsinger/astropy,MSeifert04/astropy,astropy/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy,larrybradley/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,mhvk/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,dhomeier/astropy,stargaser/astropy,pllim/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,pllim/astropy,larrybradley/astropy,pllim/astropy,pllim/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,stargaser/astropy,MSeifert04/astropy,stargaser/astropy"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport warnings\nfrom astropy.utils.exceptions import AstropyDeprecationWarning\n\ntry:\n    from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS\nexcept ImportError:\n    PYTEST_HEADER_MODULES = {}\n    TESTED_VERSIONS = {}\n\nwarnings.warn('The astropy.tests.plugins.display plugin has been deprecated. 
'\n              'See the pytest-astropy documentation for information on '\n              'migrating to using pytest-astropy to customize the pytest '\n              'header.', AstropyDeprecationWarning)\n\n## Instruction:\nTST: Fix typo in deprecation warning [ci skip]\n\n## Code After:\n\nimport warnings\nfrom astropy.utils.exceptions import AstropyDeprecationWarning\n\ntry:\n    from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,\n                                               TESTED_VERSIONS)\nexcept ImportError:\n    PYTEST_HEADER_MODULES = {}\n    TESTED_VERSIONS = {}\n\nwarnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '\n              'See the pytest-astropy-header documentation for information on '\n              'migrating to using pytest-astropy-header to customize the '\n              'pytest header.', AstropyDeprecationWarning)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom astropy.utils.exceptions import AstropyDeprecationWarning\n\ntry:\n    from pytest_astropy_header.display import (PYTEST_HEADER_MODULES,\n                                               TESTED_VERSIONS)\nexcept ImportError:\n    PYTEST_HEADER_MODULES = {}\n    TESTED_VERSIONS = {}\n\nwarnings.warn('The astropy.tests.plugins.display plugin has been deprecated. '\n              'See the pytest-astropy-header documentation for information on '\n              'migrating to using pytest-astropy-header to customize the '\n              'pytest header.', AstropyDeprecationWarning)\n\n\n ... "}}},{"rowIdx":3755,"cells":{"commit":{"kind":"string","value":"ab9a38793645a9c61cf1c320e5a4db9bf7b03ccf"},"old_file":{"kind":"string","value":"grow/deployments/utils.py"},"new_file":{"kind":"string","value":"grow/deployments/utils.py"},"old_contents":{"kind":"string","value":"from .indexes import messages\nimport git\n\n\nclass Error(Exception):\n  pass\n\n\nclass NoGitHeadError(Error, ValueError):\n  pass\n\n\ndef create_commit_message(repo):\n  message = messages.CommitMessage()\n  try:\n    commit = repo.head.commit\n  except ValueError:\n    raise NoGitHeadError('On initial commit, no HEAD yet.')\n  try:\n    repo.git.diff('--quiet')\n    has_unstaged_changes = False\n  except git.exc.GitCommandError:\n    has_unstaged_changes = True\n  message.has_unstaged_changes = has_unstaged_changes\n  message.sha = commit.hexsha\n  message.message = commit.message\n  message.branch = repo.head.ref.name\n  message.author = messages.AuthorMessage(\n      name=commit.author.name, email=commit.author.email)\n  return message\n"},"new_contents":{"kind":"string","value":"from .indexes import messages\nimport git\n\n\nclass Error(Exception):\n  pass\n\n\nclass NoGitHeadError(Error, ValueError):\n  pass\n\n\ndef create_commit_message(repo):\n  message = messages.CommitMessage()\n  try:\n    commit = repo.head.commit\n  except ValueError:\n    raise NoGitHeadError('On initial commit, no HEAD yet.')\n  try:\n    repo.git.diff('--quiet')\n    has_unstaged_changes = False\n  except git.exc.GitCommandError:\n    has_unstaged_changes = True\n  message.has_unstaged_changes = has_unstaged_changes\n  message.sha = commit.hexsha\n  message.message = commit.message\n  try:\n    message.branch = repo.head.ref.name\n  except TypeError:\n    # Allow operating in an environment with a detached HEAD.\n    pass\n  message.author = messages.AuthorMessage(\n      name=commit.author.name, email=commit.author.email)\n  return message\n"},"subject":{"kind":"string","value":"Allow operating in an environment 
with a detached HEAD."},"message":{"kind":"string","value":"Allow operating in an environment with a detached HEAD.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"grow/pygrow,denmojo/pygrow,grow/grow,grow/grow,grow/pygrow,codedcolors/pygrow,grow/grow,grow/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,codedcolors/pygrow,codedcolors/pygrow,grow/grow"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom .indexes import messages\nimport git\n\n\nclass Error(Exception):\n  pass\n\n\nclass NoGitHeadError(Error, ValueError):\n  pass\n\n\ndef create_commit_message(repo):\n  message = messages.CommitMessage()\n  try:\n    commit = repo.head.commit\n  except ValueError:\n    raise NoGitHeadError('On initial commit, no HEAD yet.')\n  try:\n    repo.git.diff('--quiet')\n    has_unstaged_changes = False\n  except git.exc.GitCommandError:\n    has_unstaged_changes = True\n  message.has_unstaged_changes = has_unstaged_changes\n  message.sha = commit.hexsha\n  message.message = commit.message\n  message.branch = repo.head.ref.name\n  message.author = messages.AuthorMessage(\n      name=commit.author.name, email=commit.author.email)\n  return message\n\n## Instruction:\nAllow operating in an environment with a detached HEAD.\n\n## Code After:\nfrom .indexes import messages\nimport git\n\n\nclass Error(Exception):\n  pass\n\n\nclass NoGitHeadError(Error, ValueError):\n  pass\n\n\ndef create_commit_message(repo):\n  message = messages.CommitMessage()\n  try:\n    commit = repo.head.commit\n  except ValueError:\n    raise NoGitHeadError('On initial commit, no HEAD yet.')\n  try:\n    repo.git.diff('--quiet')\n    has_unstaged_changes = False\n  except git.exc.GitCommandError:\n    has_unstaged_changes = True\n  message.has_unstaged_changes = has_unstaged_changes\n  message.sha = commit.hexsha\n  message.message = commit.message\n  try:\n    message.branch = repo.head.ref.name\n  except TypeError:\n    # Allow operating in an environment with a detached HEAD.\n    pass\n  message.author = messages.AuthorMessage(\n      name=commit.author.name, email=commit.author.email)\n  return message\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n  message.has_unstaged_changes = has_unstaged_changes\n  message.sha = commit.hexsha\n  message.message = commit.message\n  try:\n    message.branch = repo.head.ref.name\n  except TypeError:\n    # Allow operating in an environment with a detached HEAD.\n    pass\n  message.author = messages.AuthorMessage(\n      name=commit.author.name, email=commit.author.email)\n  return message\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3756,"cells":{"commit":{"kind":"string","value":"79c0071b7aad2992011684428611701bc58a9bff"},"old_file":{"kind":"string","value":"tests/__init__.py"},"new_file":{"kind":"string","value":"tests/__init__.py"},"old_contents":{"kind":"string","value":"try:\n    from urllib.parse import urlencode\nexcept ImportError:\n    from urllib import urlencode\n\nimport tornado.testing\nimport tornado.options\n\nimport celery\n\nfrom flower.app import Flower\nfrom flower.urls import handlers\nfrom flower.events import Events\nfrom flower.urls import settings\nfrom flower import command  # side effect - define options\n\n\nclass AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):\n    def get_app(self):\n        capp = celery.Celery()\n        events = Events(capp)\n        app = Flower(capp=capp, events=events,\n                     options=tornado.options.options,\n                     handlers=handlers, **settings)\n        app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)\n        return app\n\n    def get(self, url, **kwargs):\n        return self.fetch(url, **kwargs)\n\n    def post(self, url, **kwargs):\n        if 'body' in kwargs and isinstance(kwargs['body'], dict):\n            kwargs['body'] = urlencode(kwargs['body'])\n        return self.fetch(url, method='POST', **kwargs)\n"},"new_contents":{"kind":"string","value":"try:\n    from urllib.parse import urlencode\nexcept ImportError:\n    from urllib import urlencode\n\nimport tornado.testing\nfrom tornado.options import options\n\nimport celery\nimport mock\n\nfrom flower.app import Flower\nfrom flower.urls import handlers\nfrom flower.events import Events\nfrom flower.urls import settings\nfrom flower import command  # side effect - define options\n\n\nclass AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):\n    def get_app(self):\n        capp = celery.Celery()\n        events = Events(capp)\n        app = Flower(capp=capp, events=events,\n                     options=options, handlers=handlers, **settings)\n        app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)\n        return app\n\n    def get(self, url, **kwargs):\n        return self.fetch(url, **kwargs)\n\n    def post(self, url, **kwargs):\n        if 'body' in kwargs and isinstance(kwargs['body'], dict):\n            kwargs['body'] = urlencode(kwargs['body'])\n        return self.fetch(url, method='POST', **kwargs)\n\n    def mock_option(self, name, value):\n        return mock.patch.object(options.mockable(), name, value)\n"},"subject":{"kind":"string","value":"Add an util funcion for mocking options"},"message":{"kind":"string","value":"Add an util funcion for mocking options\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"jzhou77/flower,asmodehn/flower,jzhou77/flower,asmodehn/flower,asmodehn/flower,jzhou77/flower"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\ntry:\n    from urllib.parse import urlencode\nexcept ImportError:\n    from urllib import urlencode\n\nimport tornado.testing\nimport tornado.options\n\nimport celery\n\nfrom flower.app import Flower\nfrom flower.urls import handlers\nfrom flower.events import Events\nfrom flower.urls import settings\nfrom flower import command  # side effect - define options\n\n\nclass AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):\n    def get_app(self):\n        capp = celery.Celery()\n        events = Events(capp)\n      
  app = Flower(capp=capp, events=events,\n                     options=tornado.options.options,\n                     handlers=handlers, **settings)\n        app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)\n        return app\n\n    def get(self, url, **kwargs):\n        return self.fetch(url, **kwargs)\n\n    def post(self, url, **kwargs):\n        if 'body' in kwargs and isinstance(kwargs['body'], dict):\n            kwargs['body'] = urlencode(kwargs['body'])\n        return self.fetch(url, method='POST', **kwargs)\n\n## Instruction:\nAdd an util funcion for mocking options\n\n## Code After:\ntry:\n    from urllib.parse import urlencode\nexcept ImportError:\n    from urllib import urlencode\n\nimport tornado.testing\nfrom tornado.options import options\n\nimport celery\nimport mock\n\nfrom flower.app import Flower\nfrom flower.urls import handlers\nfrom flower.events import Events\nfrom flower.urls import settings\nfrom flower import command  # side effect - define options\n\n\nclass AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase):\n    def get_app(self):\n        capp = celery.Celery()\n        events = Events(capp)\n        app = Flower(capp=capp, events=events,\n                     options=options, handlers=handlers, **settings)\n        app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)\n        return app\n\n    def get(self, url, **kwargs):\n        return self.fetch(url, **kwargs)\n\n    def post(self, url, **kwargs):\n        if 'body' in kwargs and isinstance(kwargs['body'], dict):\n            kwargs['body'] = urlencode(kwargs['body'])\n        return self.fetch(url, method='POST', **kwargs)\n\n    def mock_option(self, name, value):\n        return mock.patch.object(options.mockable(), name, value)\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n    from urllib import urlencode\n\nimport tornado.testing\nfrom tornado.options import options\n\nimport celery\nimport mock\n\nfrom flower.app import Flower\nfrom flower.urls import handlers\n\n\n ... \n\n\n        capp = celery.Celery()\n        events = Events(capp)\n        app = Flower(capp=capp, events=events,\n                     options=options, handlers=handlers, **settings)\n        app.delay = lambda method, *args, **kwargs: method(*args, **kwargs)\n        return app\n\n\n\n ... \n\n\n        if 'body' in kwargs and isinstance(kwargs['body'], dict):\n            kwargs['body'] = urlencode(kwargs['body'])\n        return self.fetch(url, method='POST', **kwargs)\n\n    def mock_option(self, name, value):\n        return mock.patch.object(options.mockable(), name, value)\n\n\n ... 
"}}},{"rowIdx":3757,"cells":{"commit":{"kind":"string","value":"98393be0011f4e4227e6f5e86db68533af8b78e0"},"old_file":{"kind":"string","value":"webserver/profiles/models.py"},"new_file":{"kind":"string","value":"webserver/profiles/models.py"},"old_contents":{"kind":"string","value":"from django.db import models\nfrom django.db.models.signals import pre_save, post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\n\nimport markdown\nimport bleach\n\n\nclass UserProfile(models.Model):\n    user = models.OneToOneField(User)\n\n    about_me = models.TextField()\n    rendered_about_me = models.TextField(editable=False,\n                                         null=True)\n\n    @models.permalink\n    def get_absolute_url(self):\n        return ('view_profile', (), {'username': self.user.username})\n\n\n@receiver(post_save, sender=User)\ndef create_user_profile(sender, instance, created, **kwargs):\n    if created:\n        UserProfile.objects.create(user=instance)\n\n\n@receiver(pre_save, sender=UserProfile)\ndef user_profile_pre_save(sender, instance, **kwargs):\n    # Render the about_me field as HTML instead of markdown\n    rendered = markdown.markdown(instance.about_me, safe_mode='escape')\n    clean_rendered = bleach.clean(rendered,\n                                  tags=settings.ALLOWED_HTML_TAGS,\n                                  attributes=settings.ALLOWED_HTML_ATTRS)\n    instance.rendered_about_me = clean_rendered\n"},"new_contents":{"kind":"string","value":"from django.db import models\nfrom django.db.models.signals import pre_save, post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.validators import MaxLengthValidator\n\nimport markdown\nimport bleach\n\n\nclass UserProfile(models.Model):\n    user = models.OneToOneField(User)\n\n    about_me = models.TextField(validators=[MaxLengthValidator(500)])\n    rendered_about_me = models.TextField(editable=False,\n                                         null=True)\n\n    @models.permalink\n    def get_absolute_url(self):\n        return ('view_profile', (), {'username': self.user.username})\n\n\n@receiver(post_save, sender=User)\ndef create_user_profile(sender, instance, created, **kwargs):\n    if created:\n        UserProfile.objects.create(user=instance)\n\n\n@receiver(pre_save, sender=UserProfile)\ndef user_profile_pre_save(sender, instance, **kwargs):\n    # Render the about_me field as HTML instead of markdown\n    rendered = markdown.markdown(instance.about_me, safe_mode='escape')\n    clean_rendered = bleach.clean(rendered,\n                                  tags=settings.ALLOWED_HTML_TAGS,\n                                  attributes=settings.ALLOWED_HTML_ATTRS)\n    instance.rendered_about_me = clean_rendered\n"},"subject":{"kind":"string","value":"Add maximum length validator to about_me"},"message":{"kind":"string","value":"Add maximum length validator to about_me\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"siggame/webserver,siggame/webserver,siggame/webserver"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.db import models\nfrom django.db.models.signals import pre_save, post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\n\nimport 
markdown\nimport bleach\n\n\nclass UserProfile(models.Model):\n    user = models.OneToOneField(User)\n\n    about_me = models.TextField()\n    rendered_about_me = models.TextField(editable=False,\n                                         null=True)\n\n    @models.permalink\n    def get_absolute_url(self):\n        return ('view_profile', (), {'username': self.user.username})\n\n\n@receiver(post_save, sender=User)\ndef create_user_profile(sender, instance, created, **kwargs):\n    if created:\n        UserProfile.objects.create(user=instance)\n\n\n@receiver(pre_save, sender=UserProfile)\ndef user_profile_pre_save(sender, instance, **kwargs):\n    # Render the about_me field as HTML instead of markdown\n    rendered = markdown.markdown(instance.about_me, safe_mode='escape')\n    clean_rendered = bleach.clean(rendered,\n                                  tags=settings.ALLOWED_HTML_TAGS,\n                                  attributes=settings.ALLOWED_HTML_ATTRS)\n    instance.rendered_about_me = clean_rendered\n\n## Instruction:\nAdd maximum length validator to about_me\n\n## Code After:\nfrom django.db import models\nfrom django.db.models.signals import pre_save, post_save\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.validators import MaxLengthValidator\n\nimport markdown\nimport bleach\n\n\nclass UserProfile(models.Model):\n    user = models.OneToOneField(User)\n\n    about_me = models.TextField(validators=[MaxLengthValidator(500)])\n    rendered_about_me = models.TextField(editable=False,\n                                         null=True)\n\n    @models.permalink\n    def get_absolute_url(self):\n        return ('view_profile', (), {'username': self.user.username})\n\n\n@receiver(post_save, sender=User)\ndef create_user_profile(sender, instance, created, **kwargs):\n    if created:\n        UserProfile.objects.create(user=instance)\n\n\n@receiver(pre_save, sender=UserProfile)\ndef user_profile_pre_save(sender, instance, **kwargs):\n    # Render the about_me field as HTML instead of markdown\n    rendered = markdown.markdown(instance.about_me, safe_mode='escape')\n    clean_rendered = bleach.clean(rendered,\n                                  tags=settings.ALLOWED_HTML_TAGS,\n                                  attributes=settings.ALLOWED_HTML_ATTRS)\n    instance.rendered_about_me = clean_rendered\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom django.dispatch import receiver\nfrom django.contrib.auth.models import User\nfrom django.conf import settings\nfrom django.core.validators import MaxLengthValidator\n\nimport markdown\nimport bleach\n\n\n# ... modified code ... \n\n\nclass UserProfile(models.Model):\n    user = models.OneToOneField(User)\n\n    about_me = models.TextField(validators=[MaxLengthValidator(500)])\n    rendered_about_me = models.TextField(editable=False,\n                                         null=True)\n\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3758,"cells":{"commit":{"kind":"string","value":"af51ef98d8575e7832d79c1068c092d388866dcb"},"old_file":{"kind":"string","value":"donut/donut_SMTP_handler.py"},"new_file":{"kind":"string","value":"donut/donut_SMTP_handler.py"},"old_contents":{"kind":"string","value":"from logging.handlers import SMTPHandler\n\nDEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM\n            members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups \n            WHERE group_name = \"Devteam\" \n        '''\n\n\nclass DonutSMTPHandler(SMTPHandler):\n    def __init__(self,\n                 mailhost,\n                 fromaddr,\n                 toaddrs,\n                 subject,\n                 db_instance,\n                 credentials=None,\n                 secure=None,\n                 timeout=5.0):\n        super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,\n                         secure, timeout)\n        self.db_instance = db_instance\n\n    def emit(self, record):\n        '''\n        Overrides SMTPHandler's emit such that we dynamically\n        get current donut dev team members\n        '''\n        self.toaddrs = self.getAdmins()\n        super().emit(record)\n\n    def getAdmins(self):\n        ''' Returns current members in Devteam '''\n\n        with self.db_instance.cursor() as cursor:\n            cursor.execute(DEV_TEAM_EMAILS_QUERY, [])\n            res = cursor.fetchall()\n        return [result['email'] for result in res]\n"},"new_contents":{"kind":"string","value":"from logging.handlers import SMTPHandler\n\nDEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM\n            members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups\n            WHERE group_name = \"Devteam\"\n        '''\nDEFAULT_DEV_TEAM_EMAILS = ['devteam@donut.caltech.edu']\n\n\nclass DonutSMTPHandler(SMTPHandler):\n    def __init__(self,\n                 mailhost,\n                 fromaddr,\n                 toaddrs,\n                 subject,\n                 db_instance,\n                 credentials=None,\n                 secure=None,\n                 timeout=5.0):\n        super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,\n                         secure, timeout)\n        self.db_instance = db_instance\n\n    def emit(self, record):\n        '''\n        Overrides SMTPHandler's emit such that we dynamically\n        get current donut dev team members\n        '''\n        self.toaddrs = self.getAdmins()\n        super().emit(record)\n\n    def getAdmins(self):\n        ''' Returns current members in Devteam '''\n\n        try:\n            with self.db_instance.cursor() as cursor:\n                cursor.execute(DEV_TEAM_EMAILS_QUERY)\n                res = cursor.fetchall()\n            return [result['email'] for result in res]\n        except Exception:\n            # If the database is inaccessible, fallback to a hard-coded email list\n            return DEFAULT_DEV_TEAM_EMAILS\n"},"subject":{"kind":"string","value":"Allow error email to still be sent if DB is down"},"message":{"kind":"string","value":"Allow error email to still be sent if DB is down\n\nWe were seeing errors in the logs where the database was inaccessible,\nbut the errors were not being emailed out because the handler makes a DB 
query.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"ASCIT/donut,ASCIT/donut,ASCIT/donut"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom logging.handlers import SMTPHandler\n\nDEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM\n            members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups \n            WHERE group_name = \"Devteam\" \n        '''\n\n\nclass DonutSMTPHandler(SMTPHandler):\n    def __init__(self,\n                 mailhost,\n                 fromaddr,\n                 toaddrs,\n                 subject,\n                 db_instance,\n                 credentials=None,\n                 secure=None,\n                 timeout=5.0):\n        super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,\n                         secure, timeout)\n        self.db_instance = db_instance\n\n    def emit(self, record):\n        '''\n        Overrides SMTPHandler's emit such that we dynamically\n        get current donut dev team members\n        '''\n        self.toaddrs = self.getAdmins()\n        super().emit(record)\n\n    def getAdmins(self):\n        ''' Returns current members in Devteam '''\n\n        with self.db_instance.cursor() as cursor:\n            cursor.execute(DEV_TEAM_EMAILS_QUERY, [])\n            res = cursor.fetchall()\n        return [result['email'] for result in res]\n\n## Instruction:\nAllow error email to still be sent if DB is down\n\nWe were seeing errors in the logs where the database was inaccessible,\nbut the errors were not being emailed out because the handler makes a DB query.\n\n## Code After:\nfrom logging.handlers import SMTPHandler\n\nDEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM\n            members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups\n            WHERE group_name = \"Devteam\"\n        '''\nDEFAULT_DEV_TEAM_EMAILS = ['devteam@donut.caltech.edu']\n\n\nclass DonutSMTPHandler(SMTPHandler):\n    def __init__(self,\n                 mailhost,\n                 fromaddr,\n                 toaddrs,\n                 subject,\n                 db_instance,\n                 credentials=None,\n                 secure=None,\n                 timeout=5.0):\n        super().__init__(mailhost, fromaddr, toaddrs, subject, credentials,\n                         secure, timeout)\n        self.db_instance = db_instance\n\n    def emit(self, record):\n        '''\n        Overrides SMTPHandler's emit such that we dynamically\n        get current donut dev team members\n        '''\n        self.toaddrs = self.getAdmins()\n        super().emit(record)\n\n    def getAdmins(self):\n        ''' Returns current members in Devteam '''\n\n        try:\n            with self.db_instance.cursor() as cursor:\n                cursor.execute(DEV_TEAM_EMAILS_QUERY)\n                res = cursor.fetchall()\n            return [result['email'] for result in res]\n        except Exception:\n            # If the database is inaccessible, fallback to a hard-coded email list\n            return DEFAULT_DEV_TEAM_EMAILS\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... 
\n\n\nfrom logging.handlers import SMTPHandler\n\nDEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM\n            members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups\n            WHERE group_name = \"Devteam\"\n        '''\nDEFAULT_DEV_TEAM_EMAILS = ['devteam@donut.caltech.edu']\n\n\nclass DonutSMTPHandler(SMTPHandler):\n\n\n# ... modified code ... \n\n\n    def getAdmins(self):\n        ''' Returns current members in Devteam '''\n\n        try:\n            with self.db_instance.cursor() as cursor:\n                cursor.execute(DEV_TEAM_EMAILS_QUERY)\n                res = cursor.fetchall()\n            return [result['email'] for result in res]\n        except Exception:\n            # If the database is inaccessible, fallback to a hard-coded email list\n            return DEFAULT_DEV_TEAM_EMAILS\n\n\n# ... rest of the code ..."}}},{"rowIdx":3759,"cells":{"commit":{"kind":"string","value":"ca3978b6068add93418b4c5db8346143533beb7e"},"old_file":{"kind":"string","value":"examples/forwarder_device.py"},"new_file":{"kind":"string","value":"examples/forwarder_device.py"},"old_contents":{"kind":"string","value":"import os\nimport zmq\nimport yaml\n\n\nname = 'zmq_document_forwarder'\nfilenames = [\n    os.path.join('/etc', name + '.yml'),\n    os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),\n    ]\n\nconfig = {}\nfor filename in filenames:\n    if os.path.isfile(filename):\n        print('found config file at', filename)\n        with open(filename) as f:\n            config.update(yaml.load(f))\n\n\ndef main(frontend_port, backend_port):\n\n    try:\n        context = zmq.Context(1)\n        # Socket facing clients\n        frontend = context.socket(zmq.SUB)\n        frontend.bind(\"tcp://*:%d\" % frontend_port)\n        \n        frontend.setsockopt_string(zmq.SUBSCRIBE, \"\")\n        \n        # Socket facing services\n        backend = context.socket(zmq.PUB)\n        backend.bind(\"tcp://*:%d\" % backend_port)\n\n        zmq.device(zmq.FORWARDER, frontend, backend)\n    finally:\n        frontend.close()\n        backend.close()\n        context.term()\n\n\nif __name__ == \"__main__\":\n    main(int(config['frontend_port']), int(config['backend_port']))\n"},"new_contents":{"kind":"string","value":"import os\nimport zmq\nimport yaml\n\n\nname = 'zmq_document_forwarder'\nfilenames = [\n    os.path.join('/etc', name + '.yml'),\n    os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),\n    ]\n\nconfig = {}\nfor filename in filenames:\n    if os.path.isfile(filename):\n        print('found config file at', filename)\n        with open(filename) as f:\n            config.update(yaml.load(f))\n\n\ndef main(frontend_port, backend_port):\n    try:\n        context = zmq.Context(1)\n        # Socket facing clients\n        frontend = context.socket(zmq.SUB)\n        frontend.bind(\"tcp://*:%d\" % frontend_port)\n        \n        frontend.setsockopt_string(zmq.SUBSCRIBE, \"\")\n        \n        # Socket facing services\n        backend = context.socket(zmq.PUB)\n        backend.bind(\"tcp://*:%d\" % backend_port)\n        print(\"Receiving on %d; publishing to %d\" % (frontend_port,\n                                                     backend_port))\n        zmq.device(zmq.FORWARDER, frontend, backend)\n    finally:\n        frontend.close()\n        backend.close()\n        context.term()\n\n\nif __name__ == \"__main__\":\n    main(int(config['frontend_port']), 
int(config['backend_port']))\n"},"subject":{"kind":"string","value":"Print ports when forwarder device starts."},"message":{"kind":"string","value":"MNT: Print ports when forwarder device starts.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"ericdill/bluesky,ericdill/bluesky"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport zmq\nimport yaml\n\n\nname = 'zmq_document_forwarder'\nfilenames = [\n    os.path.join('/etc', name + '.yml'),\n    os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),\n    ]\n\nconfig = {}\nfor filename in filenames:\n    if os.path.isfile(filename):\n        print('found config file at', filename)\n        with open(filename) as f:\n            config.update(yaml.load(f))\n\n\ndef main(frontend_port, backend_port):\n\n    try:\n        context = zmq.Context(1)\n        # Socket facing clients\n        frontend = context.socket(zmq.SUB)\n        frontend.bind(\"tcp://*:%d\" % frontend_port)\n        \n        frontend.setsockopt_string(zmq.SUBSCRIBE, \"\")\n        \n        # Socket facing services\n        backend = context.socket(zmq.PUB)\n        backend.bind(\"tcp://*:%d\" % backend_port)\n\n        zmq.device(zmq.FORWARDER, frontend, backend)\n    finally:\n        frontend.close()\n        backend.close()\n        context.term()\n\n\nif __name__ == \"__main__\":\n    main(int(config['frontend_port']), int(config['backend_port']))\n\n## Instruction:\nMNT: Print ports when forwarder device starts.\n\n## Code After:\nimport os\nimport zmq\nimport yaml\n\n\nname = 'zmq_document_forwarder'\nfilenames = [\n    os.path.join('/etc', name + '.yml'),\n    os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'),\n    ]\n\nconfig = {}\nfor filename in filenames:\n    if os.path.isfile(filename):\n        print('found config file at', filename)\n        with open(filename) as f:\n            config.update(yaml.load(f))\n\n\ndef main(frontend_port, backend_port):\n    try:\n        context = zmq.Context(1)\n        # Socket facing clients\n        frontend = context.socket(zmq.SUB)\n        frontend.bind(\"tcp://*:%d\" % frontend_port)\n        \n        frontend.setsockopt_string(zmq.SUBSCRIBE, \"\")\n        \n        # Socket facing services\n        backend = context.socket(zmq.PUB)\n        backend.bind(\"tcp://*:%d\" % backend_port)\n        print(\"Receiving on %d; publishing to %d\" % (frontend_port,\n                                                     backend_port))\n        zmq.device(zmq.FORWARDER, frontend, backend)\n    finally:\n        frontend.close()\n        backend.close()\n        context.term()\n\n\nif __name__ == \"__main__\":\n    main(int(config['frontend_port']), int(config['backend_port']))\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n\ndef main(frontend_port, backend_port):\n    try:\n        context = zmq.Context(1)\n        # Socket facing clients\n\n\n# ... modified code ... \n\n\n        # Socket facing services\n        backend = context.socket(zmq.PUB)\n        backend.bind(\"tcp://*:%d\" % backend_port)\n        print(\"Receiving on %d; publishing to %d\" % (frontend_port,\n                                                     backend_port))\n        zmq.device(zmq.FORWARDER, frontend, backend)\n    finally:\n        frontend.close()\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3760,"cells":{"commit":{"kind":"string","value":"bb11252c277d40c8ec8c579100c04a6a676accfe"},"old_file":{"kind":"string","value":"tests/run.py"},"new_file":{"kind":"string","value":"tests/run.py"},"old_contents":{"kind":"string","value":"from os import path\nimport sys\n\nfrom colour_runner.django_runner import ColourRunnerMixin\nfrom django.conf import settings\nfrom django.test.runner import DiscoverRunner\n\n\nsettings.configure(\n    INSTALLED_APPS=(\n        # Put contenttypes before auth to work around test issue.\n        # See: https://code.djangoproject.com/ticket/10827#comment:12\n        'django.contrib.contenttypes',\n        'django.contrib.auth',\n        'django.contrib.sessions',\n        'django.contrib.admin',\n        'django-admin-sso',\n        'django-crispy-forms',\n\n        'incuna_auth',\n    ),\n    PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),\n    AUTH_USER_MODEL='tests.User',\n    ROOT_URLCONF='incuna_auth.urls',\n    REST_FRAMEWORK={\n        'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),\n        'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),\n    },\n    TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),\n)\n\n\nclass Runner(ColourRunnerMixin, DiscoverRunner):\n    pass\n\ntest_runner = Runner(verbosity=1)\nfailures = test_runner.run_tests(['tests'])\nif failures:\n    sys.exit(1)\n"},"new_contents":{"kind":"string","value":"from os import path\nimport sys\n\nfrom colour_runner.django_runner import ColourRunnerMixin\nfrom django.conf import settings\n\n\nsettings.configure(\n    INSTALLED_APPS=(\n        # Put contenttypes before auth to work around test issue.\n        # See: https://code.djangoproject.com/ticket/10827#comment:12\n        'django.contrib.contenttypes',\n        'django.contrib.auth',\n        'django.contrib.sessions',\n        'django.contrib.admin',\n        'django-admin-sso',\n        'django-crispy-forms',\n\n        'incuna_auth',\n    ),\n    PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),\n    AUTH_USER_MODEL='tests.User',\n    ROOT_URLCONF='incuna_auth.urls',\n    REST_FRAMEWORK={\n        'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),\n        'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),\n    },\n    TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),\n)\n\n\nfrom django.test.runner import DiscoverRunner\n\n\nclass Runner(ColourRunnerMixin, DiscoverRunner):\n    pass\n\ntest_runner = Runner(verbosity=1)\nfailures = test_runner.run_tests(['tests'])\nif failures:\n    sys.exit(1)\n"},"subject":{"kind":"string","value":"Reorder imports to dodge a settings problem."},"message":{"kind":"string","value":"Reorder imports to dodge a settings problem.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-2-clause"},"repos":{"kind":"string","value":"incuna/incuna-auth,ghickman/incuna-auth,incuna/incuna-auth,ghickman/incuna-auth"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom os import path\nimport sys\n\nfrom colour_runner.django_runner import ColourRunnerMixin\nfrom django.conf import settings\nfrom django.test.runner import DiscoverRunner\n\n\nsettings.configure(\n    INSTALLED_APPS=(\n        # Put contenttypes before auth to work around test issue.\n        # See: https://code.djangoproject.com/ticket/10827#comment:12\n        
'django.contrib.contenttypes',\n        'django.contrib.auth',\n        'django.contrib.sessions',\n        'django.contrib.admin',\n        'django-admin-sso',\n        'django-crispy-forms',\n\n        'incuna_auth',\n    ),\n    PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),\n    AUTH_USER_MODEL='tests.User',\n    ROOT_URLCONF='incuna_auth.urls',\n    REST_FRAMEWORK={\n        'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),\n        'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),\n    },\n    TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),\n)\n\n\nclass Runner(ColourRunnerMixin, DiscoverRunner):\n    pass\n\ntest_runner = Runner(verbosity=1)\nfailures = test_runner.run_tests(['tests'])\nif failures:\n    sys.exit(1)\n\n## Instruction:\nReorder imports to dodge a settings problem.\n\n## Code After:\nfrom os import path\nimport sys\n\nfrom colour_runner.django_runner import ColourRunnerMixin\nfrom django.conf import settings\n\n\nsettings.configure(\n    INSTALLED_APPS=(\n        # Put contenttypes before auth to work around test issue.\n        # See: https://code.djangoproject.com/ticket/10827#comment:12\n        'django.contrib.contenttypes',\n        'django.contrib.auth',\n        'django.contrib.sessions',\n        'django.contrib.admin',\n        'django-admin-sso',\n        'django-crispy-forms',\n\n        'incuna_auth',\n    ),\n    PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',),\n    AUTH_USER_MODEL='tests.User',\n    ROOT_URLCONF='incuna_auth.urls',\n    REST_FRAMEWORK={\n        'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',),\n        'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',),\n    },\n    TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)),\n)\n\n\nfrom django.test.runner import DiscoverRunner\n\n\nclass Runner(ColourRunnerMixin, DiscoverRunner):\n    pass\n\ntest_runner = Runner(verbosity=1)\nfailures = test_runner.run_tests(['tests'])\nif failures:\n    sys.exit(1)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nfrom colour_runner.django_runner import ColourRunnerMixin\nfrom django.conf import settings\n\n\nsettings.configure(\n\n\n// ... modified code ... \n\n\n)\n\n\nfrom django.test.runner import DiscoverRunner\n\n\nclass Runner(ColourRunnerMixin, DiscoverRunner):\n    pass\n\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3761,"cells":{"commit":{"kind":"string","value":"e751cb4f4805aed079fc025b9b1655f30cf5e69a"},"old_file":{"kind":"string","value":"watson/html/entities.py"},"new_file":{"kind":"string","value":"watson/html/entities.py"},"old_contents":{"kind":"string","value":"import re\nfrom html import _escape_map_full\nfrom html.entities import codepoint2name\n\nhtml_entities = {_ord: '&{0};'.format(value)\n                 for _ord, value in codepoint2name.items()}\nhtml_entities.update(_escape_map_full)\nentities_html = {value: _ord for _ord, value in html_entities.items()}\n\n\ndef encode(string):\n    \"\"\"Encodes html entities.\n\n    This is a little more full featured than html.escape, as it will\n    replace all charactes from codepoint2name.\n\n    Returns:\n        string with replaced html entities.\n    \"\"\"\n    return string.translate(html_entities)\n\n\ndef decode(string):\n    \"\"\"Decodes html entities.\n\n    Returns:\n        string with html entities decoded.\n    \"\"\"\n    return (\n        re.sub(\n            '&(?:[#a-z][a-z0-9]+);',\n            lambda m: chr(entities_html[m.group()]),\n            string)\n    )\n"},"new_contents":{"kind":"string","value":"import re\nfrom html.entities import codepoint2name\n\ntry:\n    from html import _escape_map_full\nexcept:\n    # taken from the 3.3 standard lib, as it's removed in 3.4\n    _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;',\n                        ord('\"'): '&quot;', ord('\\''): '&#x27;'}\n\nhtml_entities = {_ord: '&{0};'.format(value)\n                 for _ord, value in codepoint2name.items()}\nhtml_entities.update(_escape_map_full)\nentities_html = {value: _ord for _ord, value in html_entities.items()}\n\n\ndef encode(string):\n    \"\"\"Encodes html entities.\n\n    This is a little more full featured than html.escape, as it will\n    replace all charactes from codepoint2name.\n\n    Returns:\n        string with replaced html entities.\n    \"\"\"\n    return string.translate(html_entities)\n\n\ndef decode(string):\n    \"\"\"Decodes html entities.\n\n    Returns:\n        string with html entities decoded.\n    \"\"\"\n    return (\n        re.sub(\n            '&(?:[#a-z][a-z0-9]+);',\n            lambda m: chr(entities_html[m.group()]),\n            string)\n    )\n"},"subject":{"kind":"string","value":"Fix for Python 3.4 html module not containing _escape_map_full"},"message":{"kind":"string","value":"Fix for Python 3.4 html module not containing _escape_map_full\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"watsonpy/watson-html"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport re\nfrom html import _escape_map_full\nfrom html.entities import codepoint2name\n\nhtml_entities = {_ord: '&{0};'.format(value)\n                 for _ord, value in codepoint2name.items()}\nhtml_entities.update(_escape_map_full)\nentities_html = {value: _ord for _ord, value in html_entities.items()}\n\n\ndef encode(string):\n    \"\"\"Encodes html entities.\n\n    This is a little more full featured than html.escape, as it will\n    replace all charactes from codepoint2name.\n\n    Returns:\n        string with replaced html entities.\n    \"\"\"\n    return string.translate(html_entities)\n\n\ndef decode(string):\n    \"\"\"Decodes html entities.\n\n    Returns:\n        string with html entities decoded.\n    \"\"\"\n    return (\n       
 re.sub(\n            '&(?:[#a-z][a-z0-9]+);',\n            lambda m: chr(entities_html[m.group()]),\n            string)\n    )\n\n## Instruction:\nFix for Python 3.4 html module not containing _escape_map_full\n\n## Code After:\nimport re\nfrom html.entities import codepoint2name\n\ntry:\n    from html import _escape_map_full\nexcept:\n    # taken from the 3.3 standard lib, as it's removed in 3.4\n    _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;',\n                        ord('\"'): '&quot;', ord('\\''): '&#x27;'}\n\nhtml_entities = {_ord: '&{0};'.format(value)\n                 for _ord, value in codepoint2name.items()}\nhtml_entities.update(_escape_map_full)\nentities_html = {value: _ord for _ord, value in html_entities.items()}\n\n\ndef encode(string):\n    \"\"\"Encodes html entities.\n\n    This is a little more full featured than html.escape, as it will\n    replace all charactes from codepoint2name.\n\n    Returns:\n        string with replaced html entities.\n    \"\"\"\n    return string.translate(html_entities)\n\n\ndef decode(string):\n    \"\"\"Decodes html entities.\n\n    Returns:\n        string with html entities decoded.\n    \"\"\"\n    return (\n        re.sub(\n            '&(?:[#a-z][a-z0-9]+);',\n            lambda m: chr(entities_html[m.group()]),\n            string)\n    )\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nimport re\nfrom html.entities import codepoint2name\n\ntry:\n    from html import _escape_map_full\nexcept:\n    # taken from the 3.3 standard lib, as it's removed in 3.4\n    _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;',\n                        ord('\"'): '&quot;', ord('\\''): '&#x27;'}\n\nhtml_entities = {_ord: '&{0};'.format(value)\n                 for _ord, value in codepoint2name.items()}\n\n\n// ... rest of the code ..."}}},{"rowIdx":3762,"cells":{"commit":{"kind":"string","value":"7cef87a81278c227db0cb07329d1b659dbd175b3"},"old_file":{"kind":"string","value":"mail_factory/models.py"},"new_file":{"kind":"string","value":"mail_factory/models.py"},"old_contents":{"kind":"string","value":"\nimport django\nfrom django.conf import settings\nfrom django.utils.importlib import import_module\nfrom django.utils.module_loading import module_has_submodule\n\n\ndef autodiscover():\n    \"\"\"Auto-discover INSTALLED_APPS mails.py modules.\"\"\"\n\n    for app in settings.INSTALLED_APPS:\n        module = '%s.mails' % app  # Attempt to import the app's 'mails' module\n        try:\n            import_module(module)\n        except:\n            # Decide whether to bubble up this error. 
If the app just\n            # doesn't have a mails module, we can ignore the error\n            # attempting to import it, otherwise we want it to bubble up.\n            app_module = import_module(app)\n            if module_has_submodule(app_module, 'mails'):\n                raise\n\n\n# If we're using Django >= 1.7, use the new app-loading mecanism which is way\n# better.\nif django.VERSION < (1, 7):\n    autodiscover()\n"},"new_contents":{"kind":"string","value":"\nimport django\nfrom django.conf import settings\nfrom django.utils.module_loading import module_has_submodule\n\ntry:\n    from importlib import import_module\nexcept ImportError:\n    # Compatibility for python-2.6\n    from django.utils.importlib import import_module\n\n\ndef autodiscover():\n    \"\"\"Auto-discover INSTALLED_APPS mails.py modules.\"\"\"\n\n    for app in settings.INSTALLED_APPS:\n        module = '%s.mails' % app  # Attempt to import the app's 'mails' module\n        try:\n            import_module(module)\n        except:\n            # Decide whether to bubble up this error. If the app just\n            # doesn't have a mails module, we can ignore the error\n            # attempting to import it, otherwise we want it to bubble up.\n            app_module = import_module(app)\n            if module_has_submodule(app_module, 'mails'):\n                raise\n\n\n# If we're using Django >= 1.7, use the new app-loading mecanism which is way\n# better.\nif django.VERSION < (1, 7):\n    autodiscover()\n"},"subject":{"kind":"string","value":"Use standard library instead of django.utils.importlib"},"message":{"kind":"string","value":"Use standard library instead of django.utils.importlib\n\n> django.utils.importlib is a compatibility library for when Python 2.6 was\n> still supported. It has been obsolete since Django 1.7, which dropped support\n> for Python 2.6, and is removed in 1.9 per the deprecation cycle.\n> Use Python's import_module function instead\n> — [1]\n\nReferences:\n[1] http://stackoverflow.com/a/32763639\n[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"novafloss/django-mail-factory,novafloss/django-mail-factory"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport django\nfrom django.conf import settings\nfrom django.utils.importlib import import_module\nfrom django.utils.module_loading import module_has_submodule\n\n\ndef autodiscover():\n    \"\"\"Auto-discover INSTALLED_APPS mails.py modules.\"\"\"\n\n    for app in settings.INSTALLED_APPS:\n        module = '%s.mails' % app  # Attempt to import the app's 'mails' module\n        try:\n            import_module(module)\n        except:\n            # Decide whether to bubble up this error. If the app just\n            # doesn't have a mails module, we can ignore the error\n            # attempting to import it, otherwise we want it to bubble up.\n            app_module = import_module(app)\n            if module_has_submodule(app_module, 'mails'):\n                raise\n\n\n# If we're using Django >= 1.7, use the new app-loading mecanism which is way\n# better.\nif django.VERSION < (1, 7):\n    autodiscover()\n\n## Instruction:\nUse standard library instead of django.utils.importlib\n\n> django.utils.importlib is a compatibility library for when Python 2.6 was\n> still supported. 
It has been obsolete since Django 1.7, which dropped support\n> for Python 2.6, and is removed in 1.9 per the deprecation cycle.\n> Use Python's import_module function instead\n> — [1]\n\nReferences:\n[1] http://stackoverflow.com/a/32763639\n[2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9\n\n## Code After:\n\nimport django\nfrom django.conf import settings\nfrom django.utils.module_loading import module_has_submodule\n\ntry:\n    from importlib import import_module\nexcept ImportError:\n    # Compatibility for python-2.6\n    from django.utils.importlib import import_module\n\n\ndef autodiscover():\n    \"\"\"Auto-discover INSTALLED_APPS mails.py modules.\"\"\"\n\n    for app in settings.INSTALLED_APPS:\n        module = '%s.mails' % app  # Attempt to import the app's 'mails' module\n        try:\n            import_module(module)\n        except:\n            # Decide whether to bubble up this error. If the app just\n            # doesn't have a mails module, we can ignore the error\n            # attempting to import it, otherwise we want it to bubble up.\n            app_module = import_module(app)\n            if module_has_submodule(app_module, 'mails'):\n                raise\n\n\n# If we're using Django >= 1.7, use the new app-loading mecanism which is way\n# better.\nif django.VERSION < (1, 7):\n    autodiscover()\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\nimport django\nfrom django.conf import settings\nfrom django.utils.module_loading import module_has_submodule\n\ntry:\n    from importlib import import_module\nexcept ImportError:\n    # Compatibility for python-2.6\n    from django.utils.importlib import import_module\n\n\ndef autodiscover():\n\n\n# ... rest of the code ..."}}},{"rowIdx":3763,"cells":{"commit":{"kind":"string","value":"10ae930f6f14c2840d0b87cbec17054b4cc318d2"},"old_file":{"kind":"string","value":"facebook_auth/models.py"},"new_file":{"kind":"string","value":"facebook_auth/models.py"},"old_contents":{"kind":"string","value":"from django.contrib.auth import models as auth_models\nfrom django.db import models\nimport facepy\nimport simplejson\n\nfrom facebook_auth import utils\n\nclass FacebookUser(auth_models.User):\n    user_id = models.BigIntegerField(unique=True)\n    access_token = models.TextField(blank=True, null=True)\n    app_friends = models.ManyToManyField('self')\n\n    @property\n    def graph(self):\n        return facepy.GraphAPI(self.access_token)\n\n    @property\n    def js_session(self):\n        return simplejson.dumps({\n            'access_token': self.access_token,\n            'uid': self.user_id\n        })\n\n    @property\n    def friends(self):\n        return utils.get_from_graph_api(self.graph, \"me/friends\")['data']\n\n    def update_app_friends(self):\n        friends = self.friends\n        friends_ids = [f['id'] for f in friends]\n        self.app_friends.clear()\n        self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))\n"},"new_contents":{"kind":"string","value":"from uuid import uuid1\n\nfrom django.conf import settings\nfrom django.contrib.auth import models as auth_models\nfrom django.db import models\nimport facepy\nimport simplejson\n\nfrom facebook_auth import utils\n\nclass FacebookUser(auth_models.User):\n    user_id = models.BigIntegerField(unique=True)\n    access_token = models.TextField(blank=True, null=True)\n    app_friends = models.ManyToManyField('self')\n\n    @property\n    def graph(self):\n        return 
facepy.GraphAPI(self.access_token)\n\n    @property\n    def js_session(self):\n        return simplejson.dumps({\n            'access_token': self.access_token,\n            'uid': self.user_id\n        })\n\n    @property\n    def friends(self):\n        return utils.get_from_graph_api(self.graph, \"me/friends\")['data']\n\n    def update_app_friends(self):\n        friends = self.friends\n        friends_ids = [f['id'] for f in friends]\n        self.app_friends.clear()\n        self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))\n\n\ndef get_auth_address(request, redirect_to, scope=''):\n    state = unicode(uuid1())\n    request.session['state'] = state\n    return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (\n        settings.FACEBOOK_APP_ID, redirect_to, scope, state\n    )\n"},"subject":{"kind":"string","value":"Add support for server side authentication."},"message":{"kind":"string","value":"Add support for server side authentication.\n\nChange-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e\nReviewed-on: http://review.pozytywnie.pl:8080/5087\nReviewed-by: Tomasz Wysocki \nTested-by: Tomasz Wysocki \n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"pozytywnie/django-facebook-auth,pozytywnie/django-facebook-auth,jgoclawski/django-facebook-auth,jgoclawski/django-facebook-auth"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.contrib.auth import models as auth_models\nfrom django.db import models\nimport facepy\nimport simplejson\n\nfrom facebook_auth import utils\n\nclass FacebookUser(auth_models.User):\n    user_id = models.BigIntegerField(unique=True)\n    access_token = models.TextField(blank=True, null=True)\n    app_friends = models.ManyToManyField('self')\n\n    @property\n    def graph(self):\n        return facepy.GraphAPI(self.access_token)\n\n    @property\n    def js_session(self):\n        return simplejson.dumps({\n            'access_token': self.access_token,\n            'uid': self.user_id\n        })\n\n    @property\n    def friends(self):\n        return utils.get_from_graph_api(self.graph, \"me/friends\")['data']\n\n    def update_app_friends(self):\n        friends = self.friends\n        friends_ids = [f['id'] for f in friends]\n        self.app_friends.clear()\n        self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))\n\n## Instruction:\nAdd support for server side authentication.\n\nChange-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e\nReviewed-on: http://review.pozytywnie.pl:8080/5087\nReviewed-by: Tomasz Wysocki \nTested-by: Tomasz Wysocki \n\n## Code After:\nfrom uuid import uuid1\n\nfrom django.conf import settings\nfrom django.contrib.auth import models as auth_models\nfrom django.db import models\nimport facepy\nimport simplejson\n\nfrom facebook_auth import utils\n\nclass FacebookUser(auth_models.User):\n    user_id = models.BigIntegerField(unique=True)\n    access_token = models.TextField(blank=True, null=True)\n    app_friends = models.ManyToManyField('self')\n\n    @property\n    def graph(self):\n        return facepy.GraphAPI(self.access_token)\n\n    @property\n    def js_session(self):\n        return simplejson.dumps({\n            'access_token': self.access_token,\n            'uid': self.user_id\n        })\n\n    @property\n    def friends(self):\n        return utils.get_from_graph_api(self.graph, 
\"me/friends\")['data']\n\n    def update_app_friends(self):\n        friends = self.friends\n        friends_ids = [f['id'] for f in friends]\n        self.app_friends.clear()\n        self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))\n\n\ndef get_auth_address(request, redirect_to, scope=''):\n    state = unicode(uuid1())\n    request.session['state'] = state\n    return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (\n        settings.FACEBOOK_APP_ID, redirect_to, scope, state\n    )\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom uuid import uuid1\n\nfrom django.conf import settings\nfrom django.contrib.auth import models as auth_models\nfrom django.db import models\nimport facepy\n\n\n ... \n\n\n        friends_ids = [f['id'] for f in friends]\n        self.app_friends.clear()\n        self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))\n\n\ndef get_auth_address(request, redirect_to, scope=''):\n    state = unicode(uuid1())\n    request.session['state'] = state\n    return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % (\n        settings.FACEBOOK_APP_ID, redirect_to, scope, state\n    )\n\n\n ... "}}},{"rowIdx":3764,"cells":{"commit":{"kind":"string","value":"ccf3bcfc962a37d088507b542bd8e3af2ce515b6"},"old_file":{"kind":"string","value":"tests/test_with_testcase.py"},"new_file":{"kind":"string","value":"tests/test_with_testcase.py"},"old_contents":{"kind":"string","value":"import time\nimport unittest\n\nimport pytest\n\n\nclass TerribleTerribleWayToWriteTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark):\n        self.benchmark = benchmark\n\n    def test_foo(self):\n        self.benchmark(time.sleep, 0.000001)\n\n\nclass TerribleTerribleWayToWritePatchTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark_weave):\n        self.benchmark_weave = benchmark_weave\n\n    def test_foo2(self):\n        with self.benchmark_weave('time.sleep'):\n            time.sleep(0.0000001)\n"},"new_contents":{"kind":"string","value":"import time\nimport unittest\n\nimport pytest\n\n\nclass TerribleTerribleWayToWriteTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark):\n        self.benchmark = benchmark\n\n    def test_foo(self):\n        self.benchmark(time.sleep, 0.000001)\n\n\nclass TerribleTerribleWayToWritePatchTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark_weave):\n        self.benchmark_weave = benchmark_weave\n\n    def test_foo2(self):\n        self.benchmark_weave('time.sleep')\n        time.sleep(0.0000001)\n"},"subject":{"kind":"string","value":"Remove use of context manager."},"message":{"kind":"string","value":"Remove use of context manager.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-2-clause"},"repos":{"kind":"string","value":"thedrow/pytest-benchmark,aldanor/pytest-benchmark,SectorLabs/pytest-benchmark,ionelmc/pytest-benchmark"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport time\nimport unittest\n\nimport pytest\n\n\nclass TerribleTerribleWayToWriteTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark):\n        self.benchmark = benchmark\n\n    def test_foo(self):\n        self.benchmark(time.sleep, 
0.000001)\n\n\nclass TerribleTerribleWayToWritePatchTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark_weave):\n        self.benchmark_weave = benchmark_weave\n\n    def test_foo2(self):\n        with self.benchmark_weave('time.sleep'):\n            time.sleep(0.0000001)\n\n## Instruction:\nRemove use of context manager.\n\n## Code After:\nimport time\nimport unittest\n\nimport pytest\n\n\nclass TerribleTerribleWayToWriteTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark):\n        self.benchmark = benchmark\n\n    def test_foo(self):\n        self.benchmark(time.sleep, 0.000001)\n\n\nclass TerribleTerribleWayToWritePatchTests(unittest.TestCase):\n    @pytest.fixture(autouse=True)\n    def setupBenchmark(self, benchmark_weave):\n        self.benchmark_weave = benchmark_weave\n\n    def test_foo2(self):\n        self.benchmark_weave('time.sleep')\n        time.sleep(0.0000001)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n        self.benchmark_weave = benchmark_weave\n\n    def test_foo2(self):\n        self.benchmark_weave('time.sleep')\n        time.sleep(0.0000001)\n\n\n# ... rest of the code ..."}}},{"rowIdx":3765,"cells":{"commit":{"kind":"string","value":"d042b90239d2c995b69ea5352d72373faa5a72cc"},"old_file":{"kind":"string","value":"java/src/main/java/com/google/appengine/tools/cloudstorage/oauth/AppIdentityAccessTokenProvider.java"},"new_file":{"kind":"string","value":"java/src/main/java/com/google/appengine/tools/cloudstorage/oauth/AppIdentityAccessTokenProvider.java"},"old_contents":{"kind":"string","value":"package com.google.appengine.tools.cloudstorage.oauth;\n\nimport com.google.appengine.api.appidentity.AppIdentityService;\nimport com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;\nimport com.google.appengine.api.appidentity.AppIdentityServiceFactory;\nimport com.google.appengine.api.utils.SystemProperty;\n\nimport java.util.List;\n\n/**\n * Provider that uses the AppIdentityService for generating access tokens.\n */\nfinal class AppIdentityAccessTokenProvider implements AccessTokenProvider {\n  private final AppIdentityService appIdentityService;\n\n  public AppIdentityAccessTokenProvider() {\n    if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {\n      throw new IllegalStateException(\n          \"The access token from AppIdentity won't work in the development environment.\");\n    }\n    this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();\n  }\n\n  @Override\n  public GetAccessTokenResult getNewAccessToken(List scopes) {\n    return appIdentityService.getAccessToken(scopes);\n  }\n}\n"},"new_contents":{"kind":"string","value":"package com.google.appengine.tools.cloudstorage.oauth;\n\nimport com.google.appengine.api.appidentity.AppIdentityService;\nimport com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;\nimport com.google.appengine.api.appidentity.AppIdentityServiceFactory;\nimport com.google.appengine.api.utils.SystemProperty;\n\nimport java.util.List;\n\n/**\n * Provider that uses the AppIdentityService for generating access tokens.\n */\nfinal class AppIdentityAccessTokenProvider implements AccessTokenProvider {\n  private final AppIdentityService appIdentityService;\n\n  public AppIdentityAccessTokenProvider() {\n    this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();\n  }\n\n  @Override\n  public 
GetAccessTokenResult getNewAccessToken(List scopes) {\n    if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {\n      throw new IllegalStateException(\n          \"The access token from AppIdentity won't work in the development environment.\");\n    }\n    return appIdentityService.getAccessToken(scopes);\n  }\n}\n"},"subject":{"kind":"string","value":"Move check for development environment into the getNewAccessToken method."},"message":{"kind":"string","value":"Move check for development environment into the getNewAccessToken method.\n\nRevision created by MOE tool push_codebase.\nMOE_MIGRATION=7102\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"GoogleCloudPlatform/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client,aozarov/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage com.google.appengine.tools.cloudstorage.oauth;\n\nimport com.google.appengine.api.appidentity.AppIdentityService;\nimport com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;\nimport com.google.appengine.api.appidentity.AppIdentityServiceFactory;\nimport com.google.appengine.api.utils.SystemProperty;\n\nimport java.util.List;\n\n/**\n * Provider that uses the AppIdentityService for generating access tokens.\n */\nfinal class AppIdentityAccessTokenProvider implements AccessTokenProvider {\n  private final AppIdentityService appIdentityService;\n\n  public AppIdentityAccessTokenProvider() {\n    if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {\n      throw new IllegalStateException(\n          \"The access token from AppIdentity won't work in the development environment.\");\n    }\n    this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();\n  }\n\n  @Override\n  public GetAccessTokenResult getNewAccessToken(List scopes) {\n    return appIdentityService.getAccessToken(scopes);\n  }\n}\n\n## Instruction:\nMove check for development environment into the getNewAccessToken method.\n\nRevision created by MOE tool push_codebase.\nMOE_MIGRATION=7102\n\n## Code After:\npackage com.google.appengine.tools.cloudstorage.oauth;\n\nimport com.google.appengine.api.appidentity.AppIdentityService;\nimport com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult;\nimport com.google.appengine.api.appidentity.AppIdentityServiceFactory;\nimport com.google.appengine.api.utils.SystemProperty;\n\nimport java.util.List;\n\n/**\n * Provider that uses the AppIdentityService for generating access tokens.\n */\nfinal class AppIdentityAccessTokenProvider implements AccessTokenProvider {\n  private final AppIdentityService appIdentityService;\n\n  public AppIdentityAccessTokenProvider() {\n    this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();\n  }\n\n  @Override\n  public GetAccessTokenResult getNewAccessToken(List scopes) {\n    if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {\n      throw new IllegalStateException(\n          \"The access token from AppIdentity won't work in the development environment.\");\n    }\n    return appIdentityService.getAccessToken(scopes);\n  }\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... 
\n\n\n  private final AppIdentityService appIdentityService;\n\n  public AppIdentityAccessTokenProvider() {\n    this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService();\n  }\n\n  @Override\n  public GetAccessTokenResult getNewAccessToken(List scopes) {\n    if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) {\n      throw new IllegalStateException(\n          \"The access token from AppIdentity won't work in the development environment.\");\n    }\n    return appIdentityService.getAccessToken(scopes);\n  }\n}\n\n\n ... "}}},{"rowIdx":3766,"cells":{"commit":{"kind":"string","value":"824c8cd3eb563de60ddf13fac1f7ca1341aa01f1"},"old_file":{"kind":"string","value":"astral/api/tests/test_streams.py"},"new_file":{"kind":"string","value":"astral/api/tests/test_streams.py"},"old_contents":{"kind":"string","value":"from tornado.httpclient import HTTPRequest\nfrom nose.tools import eq_, ok_\nimport json\nimport faker\n\nfrom astral.api.tests import BaseTest\nfrom astral.models import Stream\nfrom astral.models.tests.factories import StreamFactory\n\nclass StreamsHandlerTest(BaseTest):\n    def test_get_streams(self):\n        [StreamFactory() for _ in range(3)]\n        response = self.fetch('/streams')\n        eq_(response.code, 200)\n        result = json.loads(response.body)\n        ok_('streams' in result)\n        for stream in result['streams']:\n            ok_(Stream.get_by(name=stream['name']))\n\n    def test_create_stream(self):\n        data = {'name': faker.lorem.sentence()}\n        eq_(Stream.get_by(name=data['name']), None)\n        self.http_client.fetch(HTTPRequest(\n            self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)\n        response = self.wait()\n        eq_(response.code, 200)\n        ok_(Stream.get_by(name=data['name']))\n"},"new_contents":{"kind":"string","value":"from tornado.httpclient import HTTPRequest\nfrom nose.tools import eq_, ok_\nimport json\nimport faker\n\nfrom astral.api.tests import BaseTest\nfrom astral.models import Stream\nfrom astral.models.tests.factories import StreamFactory\n\nclass StreamsHandlerTest(BaseTest):\n    def test_get_streams(self):\n        [StreamFactory() for _ in range(3)]\n        response = self.fetch('/streams')\n        eq_(response.code, 200)\n        result = json.loads(response.body)\n        ok_('streams' in result)\n        for stream in result['streams']:\n            ok_(Stream.get_by(name=stream['name']))\n\n    def test_create_stream(self):\n        data = {'name': faker.lorem.sentence()}\n        eq_(Stream.get_by(name=data['name']), None)\n        self.http_client.fetch(HTTPRequest(\n            self.get_url('/streams'), 'POST', body=json.dumps(data),\n                follow_redirects=False), self.stop)\n        response = self.wait()\n        eq_(response.code, 302)\n        ok_(Stream.get_by(name=data['name']))\n"},"subject":{"kind":"string","value":"Update tests for new redirect-after-create stream."},"message":{"kind":"string","value":"Update tests for new redirect-after-create stream.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"peplin/astral"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom tornado.httpclient import HTTPRequest\nfrom nose.tools import eq_, ok_\nimport json\nimport faker\n\nfrom astral.api.tests import BaseTest\nfrom astral.models import Stream\nfrom astral.models.tests.factories import 
StreamFactory\n\nclass StreamsHandlerTest(BaseTest):\n    def test_get_streams(self):\n        [StreamFactory() for _ in range(3)]\n        response = self.fetch('/streams')\n        eq_(response.code, 200)\n        result = json.loads(response.body)\n        ok_('streams' in result)\n        for stream in result['streams']:\n            ok_(Stream.get_by(name=stream['name']))\n\n    def test_create_stream(self):\n        data = {'name': faker.lorem.sentence()}\n        eq_(Stream.get_by(name=data['name']), None)\n        self.http_client.fetch(HTTPRequest(\n            self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop)\n        response = self.wait()\n        eq_(response.code, 200)\n        ok_(Stream.get_by(name=data['name']))\n\n## Instruction:\nUpdate tests for new redirect-after-create stream.\n\n## Code After:\nfrom tornado.httpclient import HTTPRequest\nfrom nose.tools import eq_, ok_\nimport json\nimport faker\n\nfrom astral.api.tests import BaseTest\nfrom astral.models import Stream\nfrom astral.models.tests.factories import StreamFactory\n\nclass StreamsHandlerTest(BaseTest):\n    def test_get_streams(self):\n        [StreamFactory() for _ in range(3)]\n        response = self.fetch('/streams')\n        eq_(response.code, 200)\n        result = json.loads(response.body)\n        ok_('streams' in result)\n        for stream in result['streams']:\n            ok_(Stream.get_by(name=stream['name']))\n\n    def test_create_stream(self):\n        data = {'name': faker.lorem.sentence()}\n        eq_(Stream.get_by(name=data['name']), None)\n        self.http_client.fetch(HTTPRequest(\n            self.get_url('/streams'), 'POST', body=json.dumps(data),\n                follow_redirects=False), self.stop)\n        response = self.wait()\n        eq_(response.code, 302)\n        ok_(Stream.get_by(name=data['name']))\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n        data = {'name': faker.lorem.sentence()}\n        eq_(Stream.get_by(name=data['name']), None)\n        self.http_client.fetch(HTTPRequest(\n            self.get_url('/streams'), 'POST', body=json.dumps(data),\n                follow_redirects=False), self.stop)\n        response = self.wait()\n        eq_(response.code, 302)\n        ok_(Stream.get_by(name=data['name']))\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3767,"cells":{"commit":{"kind":"string","value":"70d009834123cb5a10788763fed3193017cc8162"},"old_file":{"kind":"string","value":"libpebble2/__init__.py"},"new_file":{"kind":"string","value":"libpebble2/__init__.py"},"old_contents":{"kind":"string","value":"__author__ = 'katharine'\n\nfrom .exceptions import *\n"},"new_contents":{"kind":"string","value":"__author__ = 'katharine'\n\nimport logging\n\nfrom .exceptions import *\n\nlogging.getLogger('libpebble2').addHandler(logging.NullHandler())\n"},"subject":{"kind":"string","value":"Add a default null logger per python recommendations."},"message":{"kind":"string","value":"Add a default null logger per python recommendations.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"pebble/libpebble2"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n__author__ = 'katharine'\n\nfrom .exceptions import *\n\n## Instruction:\nAdd a default null logger per python recommendations.\n\n## Code After:\n__author__ = 'katharine'\n\nimport logging\n\nfrom .exceptions import *\n\nlogging.getLogger('libpebble2').addHandler(logging.NullHandler())\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n__author__ = 'katharine'\n\nimport logging\n\nfrom .exceptions import *\n\nlogging.getLogger('libpebble2').addHandler(logging.NullHandler())\n\n\n# ... rest of the code ..."}}},{"rowIdx":3768,"cells":{"commit":{"kind":"string","value":"29562b08e436abc8465404e49d9193537721b717"},"old_file":{"kind":"string","value":"src/odin/contrib/money/fields.py"},"new_file":{"kind":"string","value":"src/odin/contrib/money/fields.py"},"old_contents":{"kind":"string","value":"from __future__ import absolute_import, print_function\nfrom odin import exceptions\nfrom odin.fields import ScalarField\nfrom odin.validators import EMPTY_VALUES\nfrom .datatypes import Amount\n\n__all__ = ('AmountField', )\n\n\nclass AmountField(ScalarField):\n    \"\"\"\n    Field that contains a monetary amount (with an optional currency).\n    \"\"\"\n    default_error_messages = {\n        'invalid': \"'%s' value must be a (amount, currency).\",\n        'invalid_currency': \"'%s' currency is not supported.\",\n    }\n    data_type_name = \"Amount\"\n\n    def __init__(self, allowed_currencies=None, **kwargs):\n        super(AmountField, self).__init__(**kwargs)\n        self.allowed_currencies = allowed_currencies\n\n    def to_python(self, value):\n        if value in EMPTY_VALUES:\n            return\n        if isinstance(value, Amount):\n            return value\n\n        try:\n            return Amount(value)\n        except (ValueError, TypeError):\n            msg = self.error_messages['invalid'] % value\n            raise exceptions.ValidationError(msg)\n\n    def validate(self, value):\n        super(AmountField, self).validate(value)\n        if self.allowed_currencies and value not in EMPTY_VALUES:\n            if value.currency not in self.allowed_currencies:\n                msg = self.error_messages['invalid_currency'] % str(value.currency)\n                raise exceptions.ValidationError(msg)\n\n    def prepare(self, value):\n        if value in EMPTY_VALUES:\n            return\n        return float(value), value.currency.code\n"},"new_contents":{"kind":"string","value":"from __future__ import absolute_import, print_function\nfrom odin import exceptions\nfrom odin.fields import ScalarField\nfrom odin.validators import 
EMPTY_VALUES\nfrom .datatypes import Amount\n\n__all__ = (\"AmountField\",)\n\n\nclass AmountField(ScalarField):\n    \"\"\"\n    Field that contains a monetary amount (with an optional currency).\n    \"\"\"\n\n    default_error_messages = {\n        \"invalid\": \"'%s' value must be a (amount, currency).\",\n        \"invalid_currency\": \"'%s' currency is not supported.\",\n    }\n    data_type_name = \"Amount\"\n\n    def __init__(self, allowed_currencies=None, **kwargs):\n        super(AmountField, self).__init__(**kwargs)\n        self.allowed_currencies = allowed_currencies\n\n    def to_python(self, value):\n        if value in EMPTY_VALUES:\n            return\n        if isinstance(value, Amount):\n            return value\n\n        try:\n            return Amount(value)\n        except (ValueError, TypeError):\n            msg = self.error_messages[\"invalid\"] % value\n            raise exceptions.ValidationError(msg)\n\n    def validate(self, value):\n        super(AmountField, self).validate(value)\n        if (\n            self.allowed_currencies\n            and (value not in EMPTY_VALUES)\n            and (value.currency not in self.allowed_currencies)\n        ):\n            msg = self.error_messages[\"invalid_currency\"] % str(value.currency)\n            raise exceptions.ValidationError(msg)\n\n    def prepare(self, value):\n        if value in EMPTY_VALUES:\n            return\n        return float(value), value.currency.code\n"},"subject":{"kind":"string","value":"Correct issue from Sonar (and black file)"},"message":{"kind":"string","value":"Correct issue from Sonar (and black file)\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"python-odin/odin"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom __future__ import absolute_import, print_function\nfrom odin import exceptions\nfrom odin.fields import ScalarField\nfrom odin.validators import EMPTY_VALUES\nfrom .datatypes import Amount\n\n__all__ = ('AmountField', )\n\n\nclass AmountField(ScalarField):\n    \"\"\"\n    Field that contains a monetary amount (with an optional currency).\n    \"\"\"\n    default_error_messages = {\n        'invalid': \"'%s' value must be a (amount, currency).\",\n        'invalid_currency': \"'%s' currency is not supported.\",\n    }\n    data_type_name = \"Amount\"\n\n    def __init__(self, allowed_currencies=None, **kwargs):\n        super(AmountField, self).__init__(**kwargs)\n        self.allowed_currencies = allowed_currencies\n\n    def to_python(self, value):\n        if value in EMPTY_VALUES:\n            return\n        if isinstance(value, Amount):\n            return value\n\n        try:\n            return Amount(value)\n        except (ValueError, TypeError):\n            msg = self.error_messages['invalid'] % value\n            raise exceptions.ValidationError(msg)\n\n    def validate(self, value):\n        super(AmountField, self).validate(value)\n        if self.allowed_currencies and value not in EMPTY_VALUES:\n            if value.currency not in self.allowed_currencies:\n                msg = self.error_messages['invalid_currency'] % str(value.currency)\n                raise exceptions.ValidationError(msg)\n\n    def prepare(self, value):\n        if value in EMPTY_VALUES:\n            return\n        return float(value), value.currency.code\n\n## Instruction:\nCorrect issue from Sonar (and black file)\n\n## Code After:\nfrom 
__future__ import absolute_import, print_function\nfrom odin import exceptions\nfrom odin.fields import ScalarField\nfrom odin.validators import EMPTY_VALUES\nfrom .datatypes import Amount\n\n__all__ = (\"AmountField\",)\n\n\nclass AmountField(ScalarField):\n    \"\"\"\n    Field that contains a monetary amount (with an optional currency).\n    \"\"\"\n\n    default_error_messages = {\n        \"invalid\": \"'%s' value must be a (amount, currency).\",\n        \"invalid_currency\": \"'%s' currency is not supported.\",\n    }\n    data_type_name = \"Amount\"\n\n    def __init__(self, allowed_currencies=None, **kwargs):\n        super(AmountField, self).__init__(**kwargs)\n        self.allowed_currencies = allowed_currencies\n\n    def to_python(self, value):\n        if value in EMPTY_VALUES:\n            return\n        if isinstance(value, Amount):\n            return value\n\n        try:\n            return Amount(value)\n        except (ValueError, TypeError):\n            msg = self.error_messages[\"invalid\"] % value\n            raise exceptions.ValidationError(msg)\n\n    def validate(self, value):\n        super(AmountField, self).validate(value)\n        if (\n            self.allowed_currencies\n            and (value not in EMPTY_VALUES)\n            and (value.currency not in self.allowed_currencies)\n        ):\n            msg = self.error_messages[\"invalid_currency\"] % str(value.currency)\n            raise exceptions.ValidationError(msg)\n\n    def prepare(self, value):\n        if value in EMPTY_VALUES:\n            return\n        return float(value), value.currency.code\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom odin.validators import EMPTY_VALUES\nfrom .datatypes import Amount\n\n__all__ = (\"AmountField\",)\n\n\nclass AmountField(ScalarField):\n\n\n ... \n\n\n    \"\"\"\n    Field that contains a monetary amount (with an optional currency).\n    \"\"\"\n\n    default_error_messages = {\n        \"invalid\": \"'%s' value must be a (amount, currency).\",\n        \"invalid_currency\": \"'%s' currency is not supported.\",\n    }\n    data_type_name = \"Amount\"\n\n\n\n ... \n\n\n        try:\n            return Amount(value)\n        except (ValueError, TypeError):\n            msg = self.error_messages[\"invalid\"] % value\n            raise exceptions.ValidationError(msg)\n\n    def validate(self, value):\n        super(AmountField, self).validate(value)\n        if (\n            self.allowed_currencies\n            and (value not in EMPTY_VALUES)\n            and (value.currency not in self.allowed_currencies)\n        ):\n            msg = self.error_messages[\"invalid_currency\"] % str(value.currency)\n            raise exceptions.ValidationError(msg)\n\n    def prepare(self, value):\n        if value in EMPTY_VALUES:\n\n\n ... "}}},{"rowIdx":3769,"cells":{"commit":{"kind":"string","value":"bf24b8dab13c3779514a00d61c3ea440704b1cbf"},"old_file":{"kind":"string","value":"setup.py"},"new_file":{"kind":"string","value":"setup.py"},"old_contents":{"kind":"string","value":"try:\n    from setuptools import setup\n    from setuptools import find_packages\n    packages = find_packages()\nexcept ImportError:\n    from distutils.core import setup\n    import os\n    packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nif bytes is str:\n    raise Exception(\"This module is designed for python 3 only. 
Please install an older version to use python 2.\")\n\nsetup(\n    name='cle',\n    description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',\n    version='8.20.1.7',\n    python_requires='>=3.5',\n    packages=packages,\n    install_requires=[\n        'pyelftools>=0.25',\n        'cffi',\n        'pyvex==8.20.1.7',\n        'pefile',\n        'sortedcontainers>=2.0',\n    ],\n    extras_require={\n        \"minidump\": [\"minidump==0.0.10\"],\n        \"xbe\": [\"pyxbe==0.0.2\"],\n    }\n)\n"},"new_contents":{"kind":"string","value":"try:\n    from setuptools import setup\n    from setuptools import find_packages\n    packages = find_packages()\nexcept ImportError:\n    from distutils.core import setup\n    import os\n    packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nif bytes is str:\n    raise Exception(\"This module is designed for python 3 only. Please install an older version to use python 2.\")\n\nsetup(\n    name='cle',\n    description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',\n    version='8.20.1.7',\n    python_requires='>=3.5',\n    packages=packages,\n    install_requires=[\n        'pyelftools>=0.25',\n        'cffi',\n        'pyvex==8.20.1.7',\n        'pefile',\n        'sortedcontainers>=2.0',\n    ],\n    extras_require={\n        \"minidump\": [\"minidump==0.0.10\"],\n        \"xbe\": [\"pyxbe==0.0.2\"],\n        \"ar\": [\"arpy==1.1.1\"],\n    }\n)\n"},"subject":{"kind":"string","value":"Add optional dependency on arpy"},"message":{"kind":"string","value":"Add optional dependency on arpy\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-2-clause"},"repos":{"kind":"string","value":"angr/cle"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\ntry:\n    from setuptools import setup\n    from setuptools import find_packages\n    packages = find_packages()\nexcept ImportError:\n    from distutils.core import setup\n    import os\n    packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nif bytes is str:\n    raise Exception(\"This module is designed for python 3 only. Please install an older version to use python 2.\")\n\nsetup(\n    name='cle',\n    description='CLE Loads Everything (at least, many binary formats!) 
and provides a pythonic interface to analyze what they are and what they would look like in memory.',\n    version='8.20.1.7',\n    python_requires='>=3.5',\n    packages=packages,\n    install_requires=[\n        'pyelftools>=0.25',\n        'cffi',\n        'pyvex==8.20.1.7',\n        'pefile',\n        'sortedcontainers>=2.0',\n    ],\n    extras_require={\n        \"minidump\": [\"minidump==0.0.10\"],\n        \"xbe\": [\"pyxbe==0.0.2\"],\n    }\n)\n\n## Instruction:\nAdd optional dependency on arpy\n\n## Code After:\ntry:\n    from setuptools import setup\n    from setuptools import find_packages\n    packages = find_packages()\nexcept ImportError:\n    from distutils.core import setup\n    import os\n    packages = [x.strip('./').replace('/','.') for x in os.popen('find -name \"__init__.py\" | xargs -n1 dirname').read().strip().split('\\n')]\n\nif bytes is str:\n    raise Exception(\"This module is designed for python 3 only. Please install an older version to use python 2.\")\n\nsetup(\n    name='cle',\n    description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.',\n    version='8.20.1.7',\n    python_requires='>=3.5',\n    packages=packages,\n    install_requires=[\n        'pyelftools>=0.25',\n        'cffi',\n        'pyvex==8.20.1.7',\n        'pefile',\n        'sortedcontainers>=2.0',\n    ],\n    extras_require={\n        \"minidump\": [\"minidump==0.0.10\"],\n        \"xbe\": [\"pyxbe==0.0.2\"],\n        \"ar\": [\"arpy==1.1.1\"],\n    }\n)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n    extras_require={\n        \"minidump\": [\"minidump==0.0.10\"],\n        \"xbe\": [\"pyxbe==0.0.2\"],\n        \"ar\": [\"arpy==1.1.1\"],\n    }\n)\n\n\n// ... 
rest of the code ..."}}},{"rowIdx":3770,"cells":{"commit":{"kind":"string","value":"65bfede8d8739699e57ddd4f66049ac0374d1a8d"},"old_file":{"kind":"string","value":"ydf/instructions.py"},"new_file":{"kind":"string","value":"ydf/instructions.py"},"old_contents":{"kind":"string","value":"\n__all__ = []\n\n\nFROM = 'FROM'\nRUN = 'RUN'\nCMD = 'CMD'\nLABEL = 'LABEL'\nEXPOSE = 'EXPOSE'\nENV = 'ENV'\nADD = 'ADD'\nCOPY = 'COPY'\nENTRYPOINT = 'ENTRYPOINT'\nVOLUME = 'VOLUME'\nUSER = 'USER'\nWORKDIR = 'WORKDIR'\nARG = 'ARG'\nONBUILD = 'ONBUILD'\nSTOPSIGNAL = 'STOPSIGNAL'\nHEALTHCHECK = 'HEALTHCHECK'\nSHELL = 'SHELL'\n"},"new_contents":{"kind":"string","value":"\nimport collections\nimport functools\n\nfrom ydf import meta\n\n\n__all__ = []\n\n\nFROM = 'FROM'\nRUN = 'RUN'\nCMD = 'CMD'\nLABEL = 'LABEL'\nEXPOSE = 'EXPOSE'\nENV = 'ENV'\nADD = 'ADD'\nCOPY = 'COPY'\nENTRYPOINT = 'ENTRYPOINT'\nVOLUME = 'VOLUME'\nUSER = 'USER'\nWORKDIR = 'WORKDIR'\nARG = 'ARG'\nONBUILD = 'ONBUILD'\nSTOPSIGNAL = 'STOPSIGNAL'\nHEALTHCHECK = 'HEALTHCHECK'\nSHELL = 'SHELL'\n\n\ndef get_instructions():\n    \"\"\"\n    Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`.\n    \"\"\"\n    instructions = collections.defaultdict(dict)\n    for func in (value for key, value in globals().items() if meta.is_instruction(value)):\n        instructions[func.instruction_name][func.instruction_type] = func\n    return instructions\n\n\ndef instruction(name, type, desc):\n    \"\"\"\n    Decorate a function to indicate that it is responsible for converting a python type to a Docker\n    instruction.\n\n    :param name: Name of docker instruction\n    :param type: Type of python object it can convert\n    :param desc: Short description of expected format for the python object.\n    \"\"\"\n    def decorator(func):\n        @functools.wraps(func)\n        def wrapper(*args, **kwargs):\n            return '{} {}'.format(name, func(*args, **kwargs))\n        wrapper.instruction_name = name\n        wrapper.instruction_type = type\n        wrapper.instruction_desc = desc\n        return wrapper\n    return decorator\n"},"subject":{"kind":"string","value":"Add @instruction decorator to mark module level funcs as handlers."},"message":{"kind":"string","value":"Add @instruction decorator to mark module level funcs as handlers.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"ahawker/ydf"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\n__all__ = []\n\n\nFROM = 'FROM'\nRUN = 'RUN'\nCMD = 'CMD'\nLABEL = 'LABEL'\nEXPOSE = 'EXPOSE'\nENV = 'ENV'\nADD = 'ADD'\nCOPY = 'COPY'\nENTRYPOINT = 'ENTRYPOINT'\nVOLUME = 'VOLUME'\nUSER = 'USER'\nWORKDIR = 'WORKDIR'\nARG = 'ARG'\nONBUILD = 'ONBUILD'\nSTOPSIGNAL = 'STOPSIGNAL'\nHEALTHCHECK = 'HEALTHCHECK'\nSHELL = 'SHELL'\n\n## Instruction:\nAdd @instruction decorator to mark module level funcs as handlers.\n\n## Code After:\n\nimport collections\nimport functools\n\nfrom ydf import meta\n\n\n__all__ = []\n\n\nFROM = 'FROM'\nRUN = 'RUN'\nCMD = 'CMD'\nLABEL = 'LABEL'\nEXPOSE = 'EXPOSE'\nENV = 'ENV'\nADD = 'ADD'\nCOPY = 'COPY'\nENTRYPOINT = 'ENTRYPOINT'\nVOLUME = 'VOLUME'\nUSER = 'USER'\nWORKDIR = 'WORKDIR'\nARG = 'ARG'\nONBUILD = 'ONBUILD'\nSTOPSIGNAL = 'STOPSIGNAL'\nHEALTHCHECK = 'HEALTHCHECK'\nSHELL = 'SHELL'\n\n\ndef get_instructions():\n    \"\"\"\n    Get all functions within this module that are decorated with 
:func:`~ydf.instructions.instruction`.\n    \"\"\"\n    instructions = collections.defaultdict(dict)\n    for func in (value for key, value in globals().items() if meta.is_instruction(value)):\n        instructions[func.instruction_name][func.instruction_type] = func\n    return instructions\n\n\ndef instruction(name, type, desc):\n    \"\"\"\n    Decorate a function to indicate that it is responsible for converting a python type to a Docker\n    instruction.\n\n    :param name: Name of docker instruction\n    :param type: Type of python object it can convert\n    :param desc: Short description of expected format for the python object.\n    \"\"\"\n    def decorator(func):\n        @functools.wraps(func)\n        def wrapper(*args, **kwargs):\n            return '{} {}'.format(name, func(*args, **kwargs))\n        wrapper.instruction_name = name\n        wrapper.instruction_type = type\n        wrapper.instruction_desc = desc\n        return wrapper\n    return decorator\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\nimport collections\nimport functools\n\nfrom ydf import meta\n\n\n__all__ = []\n\n\n\n# ... modified code ... \n\n\nSTOPSIGNAL = 'STOPSIGNAL'\nHEALTHCHECK = 'HEALTHCHECK'\nSHELL = 'SHELL'\n\n\ndef get_instructions():\n    \"\"\"\n    Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`.\n    \"\"\"\n    instructions = collections.defaultdict(dict)\n    for func in (value for key, value in globals().items() if meta.is_instruction(value)):\n        instructions[func.instruction_name][func.instruction_type] = func\n    return instructions\n\n\ndef instruction(name, type, desc):\n    \"\"\"\n    Decorate a function to indicate that it is responsible for converting a python type to a Docker\n    instruction.\n\n    :param name: Name of docker instruction\n    :param type: Type of python object it can convert\n    :param desc: Short description of expected format for the python object.\n    \"\"\"\n    def decorator(func):\n        @functools.wraps(func)\n        def wrapper(*args, **kwargs):\n            return '{} {}'.format(name, func(*args, **kwargs))\n        wrapper.instruction_name = name\n        wrapper.instruction_type = type\n        wrapper.instruction_desc = desc\n        return wrapper\n    return decorator\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3771,"cells":{"commit":{"kind":"string","value":"cf7086620df23d8af15f7c9898edf39f64965549"},"old_file":{"kind":"string","value":"dbaas/workflow/steps/util/region_migration/check_instances_status.py"},"new_file":{"kind":"string","value":"dbaas/workflow/steps/util/region_migration/check_instances_status.py"},"old_contents":{"kind":"string","value":"import logging\nfrom util import full_stack\nfrom workflow.steps.util.base import BaseStep\nfrom workflow.exceptions.error_codes import DBAAS_0020\n\nLOG = logging.getLogger(__name__)\n\n\nclass DecreaseTTL(BaseStep):\n\n    def __unicode__(self):\n        return \"Checking instances status...\"\n\n    def do(self, workflow_dict):\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n\n    def undo(self, workflow_dict):\n        LOG.info(\"Running undo...\")\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n"},"new_contents":{"kind":"string","value":"import logging\nfrom util import full_stack\nfrom workflow.steps.util.base import BaseStep\nfrom workflow.exceptions.error_codes import DBAAS_0020\nfrom drivers.base import ConnectionError\n\nLOG = logging.getLogger(__name__)\n\n\nclass CheckInstancesStatus(BaseStep):\n\n    def __unicode__(self):\n        return \"Checking instances status...\"\n\n    def do(self, workflow_dict):\n        try:\n            databaseinfra = workflow_dict['databaseinfra']\n            driver = databaseinfra.get_driver()\n            for instance in driver.get_database_instances():\n                msg = \"Instance({}) is down\".format(instance)\n                exception_msg = Exception(msg)\n                try:\n                    status = driver.check_status(instance)\n                except ConnectionError:\n                    raise exception_msg\n                else:\n                    if status is False:\n                        raise exception_msg\n\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n\n    def undo(self, workflow_dict):\n        LOG.info(\"Running undo...\")\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n"},"subject":{"kind":"string","value":"Add step to check instances status"},"message":{"kind":"string","value":"Add step to check instances status\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"bsd-3-clause"},"repos":{"kind":"string","value":"globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport logging\nfrom util import full_stack\nfrom 
workflow.steps.util.base import BaseStep\nfrom workflow.exceptions.error_codes import DBAAS_0020\n\nLOG = logging.getLogger(__name__)\n\n\nclass DecreaseTTL(BaseStep):\n\n    def __unicode__(self):\n        return \"Checking instances status...\"\n\n    def do(self, workflow_dict):\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n\n    def undo(self, workflow_dict):\n        LOG.info(\"Running undo...\")\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n\n## Instruction:\nAdd step to check instances status\n\n## Code After:\nimport logging\nfrom util import full_stack\nfrom workflow.steps.util.base import BaseStep\nfrom workflow.exceptions.error_codes import DBAAS_0020\nfrom drivers.base import ConnectionError\n\nLOG = logging.getLogger(__name__)\n\n\nclass CheckInstancesStatus(BaseStep):\n\n    def __unicode__(self):\n        return \"Checking instances status...\"\n\n    def do(self, workflow_dict):\n        try:\n            databaseinfra = workflow_dict['databaseinfra']\n            driver = databaseinfra.get_driver()\n            for instance in driver.get_database_instances():\n                msg = \"Instance({}) is down\".format(instance)\n                exception_msg = Exception(msg)\n                try:\n                    status = driver.check_status(instance)\n                except ConnectionError:\n                    raise exception_msg\n                else:\n                    if status is False:\n                        raise exception_msg\n\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n\n    def undo(self, workflow_dict):\n        LOG.info(\"Running undo...\")\n        try:\n            return True\n        except Exception:\n            traceback = full_stack()\n\n            workflow_dict['exceptions']['error_codes'].append(DBAAS_0020)\n            workflow_dict['exceptions']['traceback'].append(traceback)\n\n            return False\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nfrom util import full_stack\nfrom workflow.steps.util.base import BaseStep\nfrom workflow.exceptions.error_codes import DBAAS_0020\nfrom drivers.base import ConnectionError\n\nLOG = logging.getLogger(__name__)\n\n\nclass CheckInstancesStatus(BaseStep):\n\n    def __unicode__(self):\n        return \"Checking instances status...\"\n\n\n ... 
\n\n\n\n    def do(self, workflow_dict):\n        try:\n            databaseinfra = workflow_dict['databaseinfra']\n            driver = databaseinfra.get_driver()\n            for instance in driver.get_database_instances():\n                msg = \"Instance({}) is down\".format(instance)\n                exception_msg = Exception(msg)\n                try:\n                    status = driver.check_status(instance)\n                except ConnectionError:\n                    raise exception_msg\n                else:\n                    if status is False:\n                        raise exception_msg\n\n            return True\n        except Exception:\n            traceback = full_stack()\n\n\n ... "}}},{"rowIdx":3772,"cells":{"commit":{"kind":"string","value":"f651d51d97b75f12ba68f1cbfca914724136d121"},"old_file":{"kind":"string","value":"tools/halide_image.h"},"new_file":{"kind":"string","value":"tools/halide_image.h"},"old_contents":{"kind":"string","value":"\n/*\n\nThis allows code that relied on halide_image.h and Halide::Tools::Image to\ncontinue to work with newer versions of Halide where HalideBuffer.h and\nHalide::Buffer are the way to work with data.\n\nBesides mapping Halide::Tools::Image to Halide::Buffer, it defines\nUSING_HALIDE_BUFFER to allow code to conditionally compile for one or the\nother.\n\nIt is intended as a stop-gap measure until the code can be updated.\n\n*/\n\n#include \"HalideBuffer.h\"\n\nnamespace Halide {\nnamespace Tools {\n\n#define USING_HALIDE_BUFFER\n\ntemplate< typename T >\nusing Image = Buffer;\n\n}   // namespace Tools\n}   // mamespace Halide\n\n#endif  // #ifndef HALIDE_TOOLS_IMAGE_H\n"},"new_contents":{"kind":"string","value":"\n/** \\file\n *\n * This allows code that relied on halide_image.h and\n * Halide::Tools::Image to continue to work with newer versions of\n * Halide where HalideBuffer.h and Halide::Buffer are the way to work\n * with data.\n *\n * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines\n * USING_HALIDE_BUFFER to allow code to conditionally compile for one\n * or the other.\n *\n * It is intended as a stop-gap measure until the code can be updated.\n */\n\n#include \"HalideBuffer.h\"\n\nnamespace Halide {\nnamespace Tools {\n\n#define USING_HALIDE_BUFFER\n\ntemplate< typename T >\nusing Image = Buffer;\n\n}   // namespace Tools\n}   // mamespace Halide\n\n#endif  // #ifndef HALIDE_TOOLS_IMAGE_H\n"},"subject":{"kind":"string","value":"Reformat comment into Doxygen comment for file."},"message":{"kind":"string","value":"Reformat comment into Doxygen comment for file.\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,psuriana/Halide,kgnk/Halide"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\n/*\n\nThis allows code that relied on halide_image.h and Halide::Tools::Image to\ncontinue to work with newer versions of Halide where HalideBuffer.h and\nHalide::Buffer are the way to work with data.\n\nBesides mapping Halide::Tools::Image to Halide::Buffer, it defines\nUSING_HALIDE_BUFFER to allow code to conditionally compile for one or the\nother.\n\nIt is intended as a stop-gap measure until the code can be updated.\n\n*/\n\n#include \"HalideBuffer.h\"\n\nnamespace Halide {\nnamespace Tools {\n\n#define 
USING_HALIDE_BUFFER\n\ntemplate< typename T >\nusing Image = Buffer;\n\n}   // namespace Tools\n}   // mamespace Halide\n\n#endif  // #ifndef HALIDE_TOOLS_IMAGE_H\n\n## Instruction:\nReformat comment into Doxygen comment for file.\n\n## Code After:\n\n/** \\file\n *\n * This allows code that relied on halide_image.h and\n * Halide::Tools::Image to continue to work with newer versions of\n * Halide where HalideBuffer.h and Halide::Buffer are the way to work\n * with data.\n *\n * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines\n * USING_HALIDE_BUFFER to allow code to conditionally compile for one\n * or the other.\n *\n * It is intended as a stop-gap measure until the code can be updated.\n */\n\n#include \"HalideBuffer.h\"\n\nnamespace Halide {\nnamespace Tools {\n\n#define USING_HALIDE_BUFFER\n\ntemplate< typename T >\nusing Image = Buffer;\n\n}   // namespace Tools\n}   // mamespace Halide\n\n#endif  // #ifndef HALIDE_TOOLS_IMAGE_H\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n\n/** \\file\n *\n * This allows code that relied on halide_image.h and\n * Halide::Tools::Image to continue to work with newer versions of\n * Halide where HalideBuffer.h and Halide::Buffer are the way to work\n * with data.\n *\n * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines\n * USING_HALIDE_BUFFER to allow code to conditionally compile for one\n * or the other.\n *\n * It is intended as a stop-gap measure until the code can be updated.\n */\n\n#include \"HalideBuffer.h\"\n\n\n\n# ... rest of the code ..."}}},{"rowIdx":3773,"cells":{"commit":{"kind":"string","value":"98b0eb3d492cb816db7ffa7ad062dde36a1feadf"},"old_file":{"kind":"string","value":"tests/unit/test_gettext.py"},"new_file":{"kind":"string","value":"tests/unit/test_gettext.py"},"old_contents":{"kind":"string","value":"\nimport logging\nimport unittest\n\nfrom openstack.common.gettextutils import _\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass GettextTest(unittest.TestCase):\n\n    def test_gettext_does_not_blow_up(self):\n        LOG.info(_('test'))\n"},"new_contents":{"kind":"string","value":"\nimport logging\nimport testtools\n\nfrom openstack.common.gettextutils import _\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass GettextTest(testtools.TestCase):\n\n    def test_gettext_does_not_blow_up(self):\n        LOG.info(_('test'))\n"},"subject":{"kind":"string","value":"Use testtools as test base class."},"message":{"kind":"string","value":"Use testtools as test base class.\n\nOn the path to testr migration, we need to replace the unittest base classes\nwith testtools.\nReplace tearDown with addCleanup, addCleanup is more resilient than tearDown.\nThe fixtures library has excellent support for managing and cleaning\ntempfiles. 
Use it.\nReplace skip_ with testtools.skipTest\n\nPart of blueprint grizzly-testtools.\n\nChange-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"varunarya10/oslo.i18n,openstack/oslo.i18n"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport logging\nimport unittest\n\nfrom openstack.common.gettextutils import _\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass GettextTest(unittest.TestCase):\n\n    def test_gettext_does_not_blow_up(self):\n        LOG.info(_('test'))\n\n## Instruction:\nUse testtools as test base class.\n\nOn the path to testr migration, we need to replace the unittest base classes\nwith testtools.\nReplace tearDown with addCleanup, addCleanup is more resilient than tearDown.\nThe fixtures library has excellent support for managing and cleaning\ntempfiles. Use it.\nReplace skip_ with testtools.skipTest\n\nPart of blueprint grizzly-testtools.\n\nChange-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98\n\n## Code After:\n\nimport logging\nimport testtools\n\nfrom openstack.common.gettextutils import _\n\n\nLOG = logging.getLogger(__name__)\n\n\nclass GettextTest(testtools.TestCase):\n\n    def test_gettext_does_not_blow_up(self):\n        LOG.info(_('test'))\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\nimport logging\nimport testtools\n\nfrom openstack.common.gettextutils import _\n\n\n\n// ... modified code ... \n\n\nLOG = logging.getLogger(__name__)\n\n\nclass GettextTest(testtools.TestCase):\n\n    def test_gettext_does_not_blow_up(self):\n        LOG.info(_('test'))\n\n\n// ... rest of the code ..."}}},{"rowIdx":3774,"cells":{"commit":{"kind":"string","value":"c3284516e8dc2c7fccfbf7e4bff46a66b4ad2f15"},"old_file":{"kind":"string","value":"cref/evaluation/__init__.py"},"new_file":{"kind":"string","value":"cref/evaluation/__init__.py"},"old_contents":{"kind":"string","value":"import os\nimport statistics\n\nfrom cref.structure import rmsd\nfrom cref.app.terminal import download_pdb, download_fasta, predict_fasta\n\n\npdbs = ['1zdd', '1gab']\nruns = 100\nfragment_sizes = range(5, 13, 2)\nnumber_of_clusters = range(4, 20, 1)\n\nfor pdb in pdbs:\n    output_dir = 'predictions/evaluation/{}/'.format(pdb)\n    try:\n        os.mkdir(output_dir)\n    except FileExistsError as e:\n        print(e)\n\n    for fragment_size in fragment_sizes:\n        fasta_file = output_dir + pdb + '.fasta'\n        download_fasta(pdb, fasta_file)\n        for n in number_of_clusters:\n            rmsds = []\n            for run in range(runs):\n                params = {\n                    'pdb': pdb,\n                    'fragment_size': fragment_size,\n                    'number_of_clusters': n\n                }\n\n\n                output_files = predict_fasta(fasta_file, output_dir, params)\n                predicted_structure = output_files[0]\n                filepath = os.path.join(\n                    os.path.dirname(predicted_structure),\n                    'experimental_structure.pdb'\n                )\n                experimental_structure = download_pdb(pdb, filepath)\n\n                rmsds.append(rmsd(predicted_structure, experimental_structure))\n        print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))\n"},"new_contents":{"kind":"string","value":"import os\nimport statistics\n\nfrom cref.structure import rmsd\nfrom cref.app.terminal import 
download_pdb, download_fasta, predict_fasta\n\n\npdbs = ['1zdd', '1gab']\nruns = 5\nfragment_sizes = range(5, 13, 2)\nnumber_of_clusters = range(4, 20, 1)\n\nfor pdb in pdbs:\n    output_dir = 'predictions/evaluation/{}/'.format(pdb)\n    try:\n        os.mkdir(output_dir)\n    except FileExistsError as e:\n        print(e)\n\n    for fragment_size in fragment_sizes:\n        fasta_file = output_dir + pdb + '.fasta'\n        download_fasta(pdb, fasta_file)\n        for n in number_of_clusters:\n            rmsds = []\n            for run in range(runs):\n                params = {\n                    'pdb': pdb,\n                    'fragment_size': fragment_size,\n                    'number_of_clusters': n\n                }\n\n\n                prediction_output = output_dir + str(run)\n                os.mkdir(prediction_output)\n                output_files = predict_fasta(fasta_file, prediction_output, params)\n                predicted_structure = output_files[0]\n                filepath = os.path.join(\n                    os.path.dirname(predicted_structure),\n                    'experimental_structure.pdb'\n                )\n                experimental_structure = download_pdb(pdb, filepath)\n\n                rmsds.append(rmsd(predicted_structure, experimental_structure))\n        print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))\n"},"subject":{"kind":"string","value":"Save output for every run"},"message":{"kind":"string","value":"Save output for every run\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"mchelem/cref2,mchelem/cref2,mchelem/cref2"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport os\nimport statistics\n\nfrom cref.structure import rmsd\nfrom cref.app.terminal import download_pdb, download_fasta, predict_fasta\n\n\npdbs = ['1zdd', '1gab']\nruns = 100\nfragment_sizes = range(5, 13, 2)\nnumber_of_clusters = range(4, 20, 1)\n\nfor pdb in pdbs:\n    output_dir = 'predictions/evaluation/{}/'.format(pdb)\n    try:\n        os.mkdir(output_dir)\n    except FileExistsError as e:\n        print(e)\n\n    for fragment_size in fragment_sizes:\n        fasta_file = output_dir + pdb + '.fasta'\n        download_fasta(pdb, fasta_file)\n        for n in number_of_clusters:\n            rmsds = []\n            for run in range(runs):\n                params = {\n                    'pdb': pdb,\n                    'fragment_size': fragment_size,\n                    'number_of_clusters': n\n                }\n\n\n                output_files = predict_fasta(fasta_file, output_dir, params)\n                predicted_structure = output_files[0]\n                filepath = os.path.join(\n                    os.path.dirname(predicted_structure),\n                    'experimental_structure.pdb'\n                )\n                experimental_structure = download_pdb(pdb, filepath)\n\n                rmsds.append(rmsd(predicted_structure, experimental_structure))\n        print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))\n\n## Instruction:\nSave output for every run\n\n## Code After:\nimport os\nimport statistics\n\nfrom cref.structure import rmsd\nfrom cref.app.terminal import download_pdb, download_fasta, predict_fasta\n\n\npdbs = ['1zdd', '1gab']\nruns = 5\nfragment_sizes = range(5, 13, 2)\nnumber_of_clusters = range(4, 20, 1)\n\nfor pdb in pdbs:\n    output_dir = 
'predictions/evaluation/{}/'.format(pdb)\n    try:\n        os.mkdir(output_dir)\n    except FileExistsError as e:\n        print(e)\n\n    for fragment_size in fragment_sizes:\n        fasta_file = output_dir + pdb + '.fasta'\n        download_fasta(pdb, fasta_file)\n        for n in number_of_clusters:\n            rmsds = []\n            for run in range(runs):\n                params = {\n                    'pdb': pdb,\n                    'fragment_size': fragment_size,\n                    'number_of_clusters': n\n                }\n\n\n                prediction_output = output_dir + str(run)\n                os.mkdir(prediction_output)\n                output_files = predict_fasta(fasta_file, prediction_output, params)\n                predicted_structure = output_files[0]\n                filepath = os.path.join(\n                    os.path.dirname(predicted_structure),\n                    'experimental_structure.pdb'\n                )\n                experimental_structure = download_pdb(pdb, filepath)\n\n                rmsds.append(rmsd(predicted_structure, experimental_structure))\n        print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n\npdbs = ['1zdd', '1gab']\nruns = 5\nfragment_sizes = range(5, 13, 2)\nnumber_of_clusters = range(4, 20, 1)\n\n\n\n ... \n\n\n                }\n\n\n                prediction_output = output_dir + str(run)\n                os.mkdir(prediction_output)\n                output_files = predict_fasta(fasta_file, prediction_output, params)\n                predicted_structure = output_files[0]\n                filepath = os.path.join(\n                    os.path.dirname(predicted_structure),\n\n\n ... "}}},{"rowIdx":3775,"cells":{"commit":{"kind":"string","value":"4653b9f493d28a6beb88a97d3d396ec1c9288f53"},"old_file":{"kind":"string","value":"Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Mixer.py"},"new_file":{"kind":"string","value":"Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Mixer.py"},"old_contents":{"kind":"string","value":"import numpy\nimport Axon\n\nclass MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):\n    channels = 8\n    bufferSize = 1024\n\n    def __init__(self, **argd):\n        super(MonoMixer, self).__init__(**argd)\n        for i in range(self.channels):\n            self.addInbox(\"in%i\" % i)\n\n    def main(self):\n        while 1:\n            output = numpy.zeros(self.bufferSize)\n            for i in range(self.channels):\n                if self.dataReady(\"in%i\" % i):\n                    output += self.recv(\"in%i\" % i)\n            output /= self.channels\n            self.send(output, \"outbox\")\n            if not self.anyReady():\n                self.pause()\n            yield 1\n\n"},"new_contents":{"kind":"string","value":"import numpy\nimport Axon\nimport time\nfrom Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent\n\nclass MonoMixer(SchedulingAdaptiveCommsComponent):\n    channels = 8\n    bufferSize = 1024\n    sampleRate = 44100\n\n    def __init__(self, **argd):\n        super(MonoMixer, self).__init__(**argd)\n        for i in range(self.channels):\n            self.addInbox(\"in%i\" % i)\n        self.period = float(self.bufferSize)/self.sampleRate\n        self.lastSendTime = time.time()\n        self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n\n\n    def main(self):\n        while 1:\n            if self.dataReady(\"event\"):\n                output = 
numpy.zeros(self.bufferSize)\n                self.recv(\"event\")\n                for i in range(self.channels):\n                    if self.dataReady(\"in%i\" % i):\n                        data = self.recv(\"in%i\" % i)\n                        if data != None:\n                            output += data\n                output /= self.channels\n                self.send(output, \"outbox\")\n                self.lastSendTime += self.period\n                self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n            else:\n                self.pause()\n"},"subject":{"kind":"string","value":"Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs."},"message":{"kind":"string","value":"Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport numpy\nimport Axon\n\nclass MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent):\n    channels = 8\n    bufferSize = 1024\n\n    def __init__(self, **argd):\n        super(MonoMixer, self).__init__(**argd)\n        for i in range(self.channels):\n            self.addInbox(\"in%i\" % i)\n\n    def main(self):\n        while 1:\n            output = numpy.zeros(self.bufferSize)\n            for i in range(self.channels):\n                if self.dataReady(\"in%i\" % i):\n                    output += self.recv(\"in%i\" % i)\n            output /= self.channels\n            self.send(output, \"outbox\")\n            if not self.anyReady():\n                self.pause()\n            yield 1\n\n\n## Instruction:\nChange the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs.\n\n## Code After:\nimport numpy\nimport Axon\nimport time\nfrom Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent\n\nclass MonoMixer(SchedulingAdaptiveCommsComponent):\n    channels = 8\n    bufferSize = 1024\n    sampleRate = 44100\n\n    def __init__(self, **argd):\n        super(MonoMixer, self).__init__(**argd)\n        for i in range(self.channels):\n            self.addInbox(\"in%i\" % i)\n        self.period = float(self.bufferSize)/self.sampleRate\n        self.lastSendTime = time.time()\n        self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n\n\n    def main(self):\n        while 1:\n            if self.dataReady(\"event\"):\n                output = numpy.zeros(self.bufferSize)\n                self.recv(\"event\")\n                for i in range(self.channels):\n                    if self.dataReady(\"in%i\" % i):\n                        data = self.recv(\"in%i\" % i)\n                        if data != None:\n                            output += data\n                output /= self.channels\n                self.send(output, \"outbox\")\n                self.lastSendTime += self.period\n                self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n            else:\n                
self.pause()\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\nimport numpy\nimport Axon\nimport time\nfrom Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent\n\nclass MonoMixer(SchedulingAdaptiveCommsComponent):\n    channels = 8\n    bufferSize = 1024\n    sampleRate = 44100\n\n    def __init__(self, **argd):\n        super(MonoMixer, self).__init__(**argd)\n        for i in range(self.channels):\n            self.addInbox(\"in%i\" % i)\n        self.period = float(self.bufferSize)/self.sampleRate\n        self.lastSendTime = time.time()\n        self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n\n\n    def main(self):\n        while 1:\n            if self.dataReady(\"event\"):\n                output = numpy.zeros(self.bufferSize)\n                self.recv(\"event\")\n                for i in range(self.channels):\n                    if self.dataReady(\"in%i\" % i):\n                        data = self.recv(\"in%i\" % i)\n                        if data != None:\n                            output += data\n                output /= self.channels\n                self.send(output, \"outbox\")\n                self.lastSendTime += self.period\n                self.scheduleAbs(\"Send\", self.lastSendTime + self.period)\n            else:\n                self.pause()\n\n\n ... "}}},{"rowIdx":3776,"cells":{"commit":{"kind":"string","value":"b2155e167b559367bc24ba614f51360793951f12"},"old_file":{"kind":"string","value":"mythril/support/source_support.py"},"new_file":{"kind":"string","value":"mythril/support/source_support.py"},"old_contents":{"kind":"string","value":"from mythril.solidity.soliditycontract import SolidityContract\nfrom mythril.ethereum.evmcontract import EVMContract\n\n\nclass Source:\n    def __init__(\n        self, source_type=None, source_format=None, source_list=None, meta=None\n    ):\n        self.source_type = source_type\n        self.source_format = source_format\n        self.source_list = []\n        self.meta = meta\n\n    def get_source_from_contracts_list(self, contracts):\n        if contracts is None or len(contracts) == 0:\n            return\n        if isinstance(contracts[0], SolidityContract):\n            self.source_type = \"solidity-file\"\n            self.source_format = \"text\"\n            for contract in contracts:\n                self.source_list += [file.filename for file in contract.solidity_files]\n        elif isinstance(contracts[0], EVMContract):\n            self.source_format = \"evm-byzantium-bytecode\"\n            self.source_type = (\n                \"raw-bytecode\" if contracts[0].name == \"MAIN\" else \"ethereum-address\"\n            )\n            for contract in contracts:\n                self.source_list.append(contract.bytecode_hash)\n\n        else:\n            assert False  # Fail hard\n        self.meta = \"\"\n"},"new_contents":{"kind":"string","value":"from mythril.solidity.soliditycontract import SolidityContract\nfrom mythril.ethereum.evmcontract import EVMContract\n\n\nclass Source:\n    def __init__(\n        self, source_type=None, source_format=None, source_list=None, meta=None\n    ):\n        self.source_type = source_type\n        self.source_format = source_format\n        self.source_list = []\n        self.meta = meta\n\n    def get_source_from_contracts_list(self, contracts):\n        if contracts is None or len(contracts) == 0:\n            return\n        if isinstance(contracts[0], SolidityContract):\n            self.source_type = \"solidity-file\"\n            
self.source_format = \"text\"\n            for contract in contracts:\n                self.source_list += [file.filename for file in contract.solidity_files]\n        elif isinstance(contracts[0], EVMContract):\n            self.source_format = \"evm-byzantium-bytecode\"\n            self.source_type = (\n                \"raw-bytecode\" if contracts[0].name == \"MAIN\" else \"ethereum-address\"\n            )\n            for contract in contracts:\n                self.source_list.append(contract.bytecode_hash)\n\n        else:\n            assert False  # Fail hard\n"},"subject":{"kind":"string","value":"Remove meta from source class (belongs to issue not source)"},"message":{"kind":"string","value":"Remove meta from source class (belongs to issue not source)\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom mythril.solidity.soliditycontract import SolidityContract\nfrom mythril.ethereum.evmcontract import EVMContract\n\n\nclass Source:\n    def __init__(\n        self, source_type=None, source_format=None, source_list=None, meta=None\n    ):\n        self.source_type = source_type\n        self.source_format = source_format\n        self.source_list = []\n        self.meta = meta\n\n    def get_source_from_contracts_list(self, contracts):\n        if contracts is None or len(contracts) == 0:\n            return\n        if isinstance(contracts[0], SolidityContract):\n            self.source_type = \"solidity-file\"\n            self.source_format = \"text\"\n            for contract in contracts:\n                self.source_list += [file.filename for file in contract.solidity_files]\n        elif isinstance(contracts[0], EVMContract):\n            self.source_format = \"evm-byzantium-bytecode\"\n            self.source_type = (\n                \"raw-bytecode\" if contracts[0].name == \"MAIN\" else \"ethereum-address\"\n            )\n            for contract in contracts:\n                self.source_list.append(contract.bytecode_hash)\n\n        else:\n            assert False  # Fail hard\n        self.meta = \"\"\n\n## Instruction:\nRemove meta from source class (belongs to issue not source)\n\n## Code After:\nfrom mythril.solidity.soliditycontract import SolidityContract\nfrom mythril.ethereum.evmcontract import EVMContract\n\n\nclass Source:\n    def __init__(\n        self, source_type=None, source_format=None, source_list=None, meta=None\n    ):\n        self.source_type = source_type\n        self.source_format = source_format\n        self.source_list = []\n        self.meta = meta\n\n    def get_source_from_contracts_list(self, contracts):\n        if contracts is None or len(contracts) == 0:\n            return\n        if isinstance(contracts[0], SolidityContract):\n            self.source_type = \"solidity-file\"\n            self.source_format = \"text\"\n            for contract in contracts:\n                self.source_list += [file.filename for file in contract.solidity_files]\n        elif isinstance(contracts[0], EVMContract):\n            self.source_format = \"evm-byzantium-bytecode\"\n            self.source_type = (\n                \"raw-bytecode\" if contracts[0].name == \"MAIN\" else \"ethereum-address\"\n            )\n            for contract in contracts:\n                
self.source_list.append(contract.bytecode_hash)\n\n        else:\n            assert False  # Fail hard\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n\n        else:\n            assert False  # Fail hard\n\n\n ... "}}},{"rowIdx":3777,"cells":{"commit":{"kind":"string","value":"c057f4865052c893af9abcae2c2f37ec02d56118"},"old_file":{"kind":"string","value":"example_test_set/tests/test_set_root.py"},"new_file":{"kind":"string","value":"example_test_set/tests/test_set_root.py"},"old_contents":{"kind":"string","value":"import pytest\n\nclass Dut(object):\n    'fake a device under test'\n\n    _allowed = ('a', 'b', 'c')\n\n    def __init__(self, mode=None):\n        self._mode = mode\n\n    def get_mode(self):\n        return self._mode\n\n    def set_mode(self, val):\n        self._mode = val\n\n    def check_mode(self):\n        assert self._mode in self._allowed\n\n# fixtures\n@pytest.fixture\ndef dut(request):\n    return Dut('c')\n\n\n@pytest.yield_fixture(params=('a', 'b', 'c'))\ndef mode(request, dut):\n    orig_mode = dut.get_mode()\n    dut.set_mode(request.param)\n    yield dut\n    dut.set_mode(orig_mode)\n\n\n@pytest.yield_fixture(params=[1, 2, 3])\ndef inputs(request):\n    yield request.param\n\n\ndef test_modes(mode):\n    assert mode.check_mode()\n\n\ndef test_inputs(inputs):\n    assert inputs < 2\n\n\nclass TestBoth(object):\n    def test_m(self, mode, inputs):\n        assert mode.check_mode()\n        assert inputs < 2\n"},"new_contents":{"kind":"string","value":"import pytest\n\nclass Dut(object):\n    'fake a device under test'\n\n    _allowed = ('a', 'b', 'c')\n\n    def __init__(self, mode=None):\n        self._mode = mode\n\n    def get_mode(self):\n        return self._mode\n\n    def set_mode(self, val):\n        self._mode = val\n\n    def check_mode(self):\n        assert self._mode in self._allowed\n\n# fixtures\n@pytest.fixture\ndef dut(request):\n    return Dut('c')\n\n\n@pytest.yield_fixture(params=('a', 'b', 'c'))\ndef mode(request, dut):\n    orig_mode = dut.get_mode()\n    dut.set_mode(request.param)\n    yield dut\n    dut.set_mode(orig_mode)\n\n\n@pytest.yield_fixture(params=['dog', 'cat', 'mouse'])\ndef inputs(request):\n    yield request.param\n\n\ndef test_modes(mode):\n    assert mode.check_mode()\n\n\ndef test_inputs(inputs):\n    assert inputs < 2\n\n\nclass TestBoth(object):\n    def test_m(self, mode, inputs):\n        assert mode.check_mode()\n        assert inputs < 2\n"},"subject":{"kind":"string","value":"Tweak some example fixture ids"},"message":{"kind":"string","value":"Tweak some example fixture ids\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"tgoodlet/pytest-interactive"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport pytest\n\nclass Dut(object):\n    'fake a device under test'\n\n    _allowed = ('a', 'b', 'c')\n\n    def __init__(self, mode=None):\n        self._mode = mode\n\n    def get_mode(self):\n        return self._mode\n\n    def set_mode(self, val):\n        self._mode = val\n\n    def check_mode(self):\n        assert self._mode in self._allowed\n\n# fixtures\n@pytest.fixture\ndef dut(request):\n    return Dut('c')\n\n\n@pytest.yield_fixture(params=('a', 'b', 'c'))\ndef mode(request, dut):\n    orig_mode = dut.get_mode()\n    dut.set_mode(request.param)\n    yield dut\n    dut.set_mode(orig_mode)\n\n\n@pytest.yield_fixture(params=[1, 2, 3])\ndef inputs(request):\n    yield 
request.param\n\n\ndef test_modes(mode):\n    assert mode.check_mode()\n\n\ndef test_inputs(inputs):\n    assert inputs < 2\n\n\nclass TestBoth(object):\n    def test_m(self, mode, inputs):\n        assert mode.check_mode()\n        assert inputs < 2\n\n## Instruction:\nTweak some example fixture ids\n\n## Code After:\nimport pytest\n\nclass Dut(object):\n    'fake a device under test'\n\n    _allowed = ('a', 'b', 'c')\n\n    def __init__(self, mode=None):\n        self._mode = mode\n\n    def get_mode(self):\n        return self._mode\n\n    def set_mode(self, val):\n        self._mode = val\n\n    def check_mode(self):\n        assert self._mode in self._allowed\n\n# fixtures\n@pytest.fixture\ndef dut(request):\n    return Dut('c')\n\n\n@pytest.yield_fixture(params=('a', 'b', 'c'))\ndef mode(request, dut):\n    orig_mode = dut.get_mode()\n    dut.set_mode(request.param)\n    yield dut\n    dut.set_mode(orig_mode)\n\n\n@pytest.yield_fixture(params=['dog', 'cat', 'mouse'])\ndef inputs(request):\n    yield request.param\n\n\ndef test_modes(mode):\n    assert mode.check_mode()\n\n\ndef test_inputs(inputs):\n    assert inputs < 2\n\n\nclass TestBoth(object):\n    def test_m(self, mode, inputs):\n        assert mode.check_mode()\n        assert inputs < 2\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n    dut.set_mode(orig_mode)\n\n\n@pytest.yield_fixture(params=['dog', 'cat', 'mouse'])\ndef inputs(request):\n    yield request.param\n\n\n\n ... "}}},{"rowIdx":3778,"cells":{"commit":{"kind":"string","value":"5a885124432ccb33d180a8e73c753ceab54ffdf5"},"old_file":{"kind":"string","value":"src/Itemizers.py"},"new_file":{"kind":"string","value":"src/Itemizers.py"},"old_contents":{"kind":"string","value":"\nfrom Foundation import objc\nfrom Foundation import NSBundle\nfrom AppKit import NSImage\n\ndef iconForName(klass, name):\n\t\"\"\"Return the NSImage instance representing a `name` item.\"\"\"\n\timgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')\n\timg = NSImage.alloc().initWithContentsOfFile_(imgpath)\n\timg.autorelease()\n\treturn img\n\nclass HaskellModuleItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for modules\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName(self.class__(), 'module')\n\t\n\nclass HaskellTypeItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for datatypes\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName(self.class__(), 'type')\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn self.text().lstrip()\n\t\n\nclass HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for functions\"\"\"\n\tpass\n\nclass HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):\n\t\"\"\"Itemizer for code blocks\"\"\"\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn '%s %s' % (u'{…}', self.text().lstrip())\n\t\t\n\t\n"},"new_contents":{"kind":"string","value":"\nfrom Foundation import objc\nfrom Foundation import NSBundle\nfrom AppKit import NSImage\n\nhaskellBundleIdentifier = 'org.purl.net.mkhl.haskell'\n\ndef iconForName(name):\n\t\"\"\"Return the NSImage instance representing a `name` item.\"\"\"\n\tbundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)\n\timgpath = bundle.pathForResource_ofType_(name, 'png')\n\timg = NSImage.alloc().initWithContentsOfFile_(imgpath)\n\timg.autorelease()\n\treturn img\n\nclass 
HaskellModuleItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for modules\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('module')\n\t\n\nclass HaskellTypeItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for datatypes\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('type')\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn self.text().lstrip()\n\t\n\nclass HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for functions\"\"\"\n\tpass\n\nclass HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):\n\t\"\"\"Itemizer for code blocks\"\"\"\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn '%s %s' % (u'{…}', self.text().lstrip())\n\t\t\n\t\n"},"subject":{"kind":"string","value":"Simplify the icon finder function."},"message":{"kind":"string","value":"Simplify the icon finder function.\n\nWe statically know our bundle identifier, so we don’t have too find the bundle by runtime class.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"mkhl/haskell.sugar"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nfrom Foundation import objc\nfrom Foundation import NSBundle\nfrom AppKit import NSImage\n\ndef iconForName(klass, name):\n\t\"\"\"Return the NSImage instance representing a `name` item.\"\"\"\n\timgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png')\n\timg = NSImage.alloc().initWithContentsOfFile_(imgpath)\n\timg.autorelease()\n\treturn img\n\nclass HaskellModuleItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for modules\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName(self.class__(), 'module')\n\t\n\nclass HaskellTypeItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for datatypes\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName(self.class__(), 'type')\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn self.text().lstrip()\n\t\n\nclass HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for functions\"\"\"\n\tpass\n\nclass HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):\n\t\"\"\"Itemizer for code blocks\"\"\"\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn '%s %s' % (u'{…}', self.text().lstrip())\n\t\t\n\t\n\n## Instruction:\nSimplify the icon finder function.\n\nWe statically know our bundle identifier, so we don’t have too find the bundle by runtime class.\n\n## Code After:\n\nfrom Foundation import objc\nfrom Foundation import NSBundle\nfrom AppKit import NSImage\n\nhaskellBundleIdentifier = 'org.purl.net.mkhl.haskell'\n\ndef iconForName(name):\n\t\"\"\"Return the NSImage instance representing a `name` item.\"\"\"\n\tbundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)\n\timgpath = bundle.pathForResource_ofType_(name, 'png')\n\timg = NSImage.alloc().initWithContentsOfFile_(imgpath)\n\timg.autorelease()\n\treturn img\n\nclass HaskellModuleItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for modules\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('module')\n\t\n\nclass HaskellTypeItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for 
datatypes\"\"\"\n\t\n\tdef isDecorator(self):\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('type')\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn self.text().lstrip()\n\t\n\nclass HaskellFunctionItem(objc.lookUpClass('ESBaseItem')):\n\t\"\"\"Itemizer for functions\"\"\"\n\tpass\n\nclass HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')):\n\t\"\"\"Itemizer for code blocks\"\"\"\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\t\n\tdef title(self):\n\t\treturn '%s %s' % (u'{…}', self.text().lstrip())\n\t\t\n\t\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\nfrom Foundation import NSBundle\nfrom AppKit import NSImage\n\nhaskellBundleIdentifier = 'org.purl.net.mkhl.haskell'\n\ndef iconForName(name):\n\t\"\"\"Return the NSImage instance representing a `name` item.\"\"\"\n\tbundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier)\n\timgpath = bundle.pathForResource_ofType_(name, 'png')\n\timg = NSImage.alloc().initWithContentsOfFile_(imgpath)\n\timg.autorelease()\n\treturn img\n\n\n// ... modified code ... \n\n\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('module')\n\t\n\nclass HaskellTypeItem(objc.lookUpClass('ESBaseItem')):\n\n\n ... \n\n\n\t\treturn True\n\t\n\tdef image(self):\n\t\treturn iconForName('type')\n\t\n\tdef isTextualizer(self):\n\t\treturn True\n\n\n// ... rest of the code ..."}}},{"rowIdx":3779,"cells":{"commit":{"kind":"string","value":"44d20ecaf13cb0245ee562d234939e762b5b0921"},"old_file":{"kind":"string","value":"include/agent.h"},"new_file":{"kind":"string","value":"include/agent.h"},"old_contents":{"kind":"string","value":"\n// forward declaration\nnamespace Url\n{\n    class Url;\n}\n\nnamespace Rep\n{\n    class Agent\n    {\n    public:\n        /* The type for the delay. */\n        typedef float delay_t;\n\n        /**\n         * Construct an agent.\n         */\n        explicit Agent(const std::string& host) :\n            directives_(), delay_(-1.0), sorted_(true), host_(host) {}\n\n        /**\n         * Add an allowed directive.\n         */\n        Agent& allow(const std::string& query);\n\n        /**\n         * Add a disallowed directive.\n         */\n        Agent& disallow(const std::string& query);\n\n        /**\n         * Set the delay for this agent.\n         */\n        Agent& delay(delay_t value) {\n            delay_ = value;\n            return *this;\n        }\n\n        /**\n         * Return the delay for this agent.\n         */\n        delay_t delay() const { return delay_; }\n\n        /**\n         * A vector of the directives, in priority-sorted order.\n         */\n        const std::vector& directives() const;\n\n        /**\n         * Return true if the URL (either a full URL or a path) is allowed.\n         */\n        bool allowed(const std::string& path) const;\n\n        std::string str() const;\n\n    private:\n        bool is_external(const Url::Url& url) const;\n\n        mutable std::vector directives_;\n        delay_t delay_;\n        mutable bool sorted_;\n        std::string host_;\n    };\n}\n\n#endif\n"},"new_contents":{"kind":"string","value":"\n// forward declaration\nnamespace Url\n{\n    class Url;\n}\n\nnamespace Rep\n{\n    class Agent\n    {\n    public:\n        /* The type for the delay. 
*/\n        typedef float delay_t;\n\n        /**\n         * Default constructor\n         */\n        Agent() : Agent(\"\") {}\n\n        /**\n         * Construct an agent.\n         */\n        explicit Agent(const std::string& host) :\n            directives_(), delay_(-1.0), sorted_(true), host_(host) {}\n\n        /**\n         * Add an allowed directive.\n         */\n        Agent& allow(const std::string& query);\n\n        /**\n         * Add a disallowed directive.\n         */\n        Agent& disallow(const std::string& query);\n\n        /**\n         * Set the delay for this agent.\n         */\n        Agent& delay(delay_t value) {\n            delay_ = value;\n            return *this;\n        }\n\n        /**\n         * Return the delay for this agent.\n         */\n        delay_t delay() const { return delay_; }\n\n        /**\n         * A vector of the directives, in priority-sorted order.\n         */\n        const std::vector& directives() const;\n\n        /**\n         * Return true if the URL (either a full URL or a path) is allowed.\n         */\n        bool allowed(const std::string& path) const;\n\n        std::string str() const;\n\n    private:\n        bool is_external(const Url::Url& url) const;\n\n        mutable std::vector directives_;\n        delay_t delay_;\n        mutable bool sorted_;\n        std::string host_;\n    };\n}\n\n#endif\n"},"subject":{"kind":"string","value":"Add back default constructor for Agent."},"message":{"kind":"string","value":"Add back default constructor for Agent.\n\nPreviously, this was removed in #28, but the Cython bindings in reppy\n*really* want there to be a default constructor, so I'm adding it back\nfor convenience.\n"},"lang":{"kind":"string","value":"C"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"seomoz/rep-cpp,seomoz/rep-cpp"},"config":{"kind":"string","value":"c"},"content":{"kind":"string","value":"## Code Before:\n\n// forward declaration\nnamespace Url\n{\n    class Url;\n}\n\nnamespace Rep\n{\n    class Agent\n    {\n    public:\n        /* The type for the delay. 
*/\n        typedef float delay_t;\n\n        /**\n         * Construct an agent.\n         */\n        explicit Agent(const std::string& host) :\n            directives_(), delay_(-1.0), sorted_(true), host_(host) {}\n\n        /**\n         * Add an allowed directive.\n         */\n        Agent& allow(const std::string& query);\n\n        /**\n         * Add a disallowed directive.\n         */\n        Agent& disallow(const std::string& query);\n\n        /**\n         * Set the delay for this agent.\n         */\n        Agent& delay(delay_t value) {\n            delay_ = value;\n            return *this;\n        }\n\n        /**\n         * Return the delay for this agent.\n         */\n        delay_t delay() const { return delay_; }\n\n        /**\n         * A vector of the directives, in priority-sorted order.\n         */\n        const std::vector& directives() const;\n\n        /**\n         * Return true if the URL (either a full URL or a path) is allowed.\n         */\n        bool allowed(const std::string& path) const;\n\n        std::string str() const;\n\n    private:\n        bool is_external(const Url::Url& url) const;\n\n        mutable std::vector directives_;\n        delay_t delay_;\n        mutable bool sorted_;\n        std::string host_;\n    };\n}\n\n#endif\n\n## Instruction:\nAdd back default constructor for Agent.\n\nPreviously, this was removed in #28, but the Cython bindings in reppy\n*really* want there to be a default constructor, so I'm adding it back\nfor convenience.\n\n## Code After:\n\n// forward declaration\nnamespace Url\n{\n    class Url;\n}\n\nnamespace Rep\n{\n    class Agent\n    {\n    public:\n        /* The type for the delay. */\n        typedef float delay_t;\n\n        /**\n         * Default constructor\n         */\n        Agent() : Agent(\"\") {}\n\n        /**\n         * Construct an agent.\n         */\n        explicit Agent(const std::string& host) :\n            directives_(), delay_(-1.0), sorted_(true), host_(host) {}\n\n        /**\n         * Add an allowed directive.\n         */\n        Agent& allow(const std::string& query);\n\n        /**\n         * Add a disallowed directive.\n         */\n        Agent& disallow(const std::string& query);\n\n        /**\n         * Set the delay for this agent.\n         */\n        Agent& delay(delay_t value) {\n            delay_ = value;\n            return *this;\n        }\n\n        /**\n         * Return the delay for this agent.\n         */\n        delay_t delay() const { return delay_; }\n\n        /**\n         * A vector of the directives, in priority-sorted order.\n         */\n        const std::vector& directives() const;\n\n        /**\n         * Return true if the URL (either a full URL or a path) is allowed.\n         */\n        bool allowed(const std::string& path) const;\n\n        std::string str() const;\n\n    private:\n        bool is_external(const Url::Url& url) const;\n\n        mutable std::vector directives_;\n        delay_t delay_;\n        mutable bool sorted_;\n        std::string host_;\n    };\n}\n\n#endif\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\n    public:\n        /* The type for the delay. */\n        typedef float delay_t;\n\n        /**\n         * Default constructor\n         */\n        Agent() : Agent(\"\") {}\n\n        /**\n         * Construct an agent.\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3780,"cells":{"commit":{"kind":"string","value":"1a71fba6224a9757f19e702a3b9a1cebf496a754"},"old_file":{"kind":"string","value":"src/loop+blkback/plugin.py"},"new_file":{"kind":"string","value":"src/loop+blkback/plugin.py"},"old_contents":{"kind":"string","value":"\nimport os\nimport sys\nimport xapi\nimport xapi.plugin\nfrom xapi.storage.datapath import log\n\n\nclass Implementation(xapi.plugin.Plugin_skeleton):\n\n    def query(self, dbg):\n        return {\n            \"plugin\": \"loopdev+blkback\",\n            \"name\": \"The loopdev+blkback kernel-space datapath plugin\",\n            \"description\": (\"This plugin manages and configures loop\"\n                            \" devices which can be connected to VMs\"\n                            \" directly via kernel-space blkback\"),\n            \"vendor\": \"Citrix\",\n            \"copyright\": \"(C) 2015 Citrix Inc\",\n            \"version\": \"3.0\",\n            \"required_api_version\": \"3.0\",\n            \"features\": [\n            ],\n            \"configuration\": {},\n            \"required_cluster_stack\": []}\n\nif __name__ == \"__main__\":\n    log.log_call_argv()\n    cmd = xapi.plugin.Plugin_commandline(Implementation())\n    base = os.path.basename(sys.argv[0])\n    if base == \"Plugin.Query\":\n        cmd.query()\n    else:\n        raise xapi.plugin.Unimplemented(base)\n"},"new_contents":{"kind":"string","value":"\nimport os\nimport sys\nimport xapi\nimport xapi.storage.api.plugin\nfrom xapi.storage import log\n\n\nclass Implementation(xapi.storage.api.plugin.Plugin_skeleton):\n\n    def query(self, dbg):\n        return {\n            \"plugin\": \"loopdev+blkback\",\n            \"name\": \"The loopdev+blkback kernel-space datapath plugin\",\n            \"description\": (\"This plugin manages and configures loop\"\n                            \" devices which can be connected to VMs\"\n                            \" directly via kernel-space blkback\"),\n            \"vendor\": \"Citrix\",\n            \"copyright\": \"(C) 2015 Citrix Inc\",\n            \"version\": \"3.0\",\n            \"required_api_version\": \"3.0\",\n            \"features\": [\n            ],\n            \"configuration\": {},\n            \"required_cluster_stack\": []}\n\nif __name__ == \"__main__\":\n    log.log_call_argv()\n    cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())\n    base = os.path.basename(sys.argv[0])\n    if base == \"Plugin.Query\":\n        cmd.query()\n    else:\n        raise xapi.storage.api.plugin.Unimplemented(base)\n"},"subject":{"kind":"string","value":"Use the new xapi.storage package hierarchy"},"message":{"kind":"string","value":"Use the new xapi.storage package hierarchy\n\nSigned-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com>\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"lgpl-2.1"},"repos":{"kind":"string","value":"jjd27/xapi-storage-datapath-plugins,robertbreker/xapi-storage-datapath-plugins,djs55/xapi-storage-datapath-plugins,xapi-project/xapi-storage-datapath-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\nimport os\nimport sys\nimport xapi\nimport xapi.plugin\nfrom xapi.storage.datapath import log\n\n\nclass Implementation(xapi.plugin.Plugin_skeleton):\n\n    def query(self, dbg):\n        return {\n        
    \"plugin\": \"loopdev+blkback\",\n            \"name\": \"The loopdev+blkback kernel-space datapath plugin\",\n            \"description\": (\"This plugin manages and configures loop\"\n                            \" devices which can be connected to VMs\"\n                            \" directly via kernel-space blkback\"),\n            \"vendor\": \"Citrix\",\n            \"copyright\": \"(C) 2015 Citrix Inc\",\n            \"version\": \"3.0\",\n            \"required_api_version\": \"3.0\",\n            \"features\": [\n            ],\n            \"configuration\": {},\n            \"required_cluster_stack\": []}\n\nif __name__ == \"__main__\":\n    log.log_call_argv()\n    cmd = xapi.plugin.Plugin_commandline(Implementation())\n    base = os.path.basename(sys.argv[0])\n    if base == \"Plugin.Query\":\n        cmd.query()\n    else:\n        raise xapi.plugin.Unimplemented(base)\n\n## Instruction:\nUse the new xapi.storage package hierarchy\n\nSigned-off-by: David Scott <63c9eb0ea83039690fefa11afe17873ba8278a56@eu.citrix.com>\n\n## Code After:\n\nimport os\nimport sys\nimport xapi\nimport xapi.storage.api.plugin\nfrom xapi.storage import log\n\n\nclass Implementation(xapi.storage.api.plugin.Plugin_skeleton):\n\n    def query(self, dbg):\n        return {\n            \"plugin\": \"loopdev+blkback\",\n            \"name\": \"The loopdev+blkback kernel-space datapath plugin\",\n            \"description\": (\"This plugin manages and configures loop\"\n                            \" devices which can be connected to VMs\"\n                            \" directly via kernel-space blkback\"),\n            \"vendor\": \"Citrix\",\n            \"copyright\": \"(C) 2015 Citrix Inc\",\n            \"version\": \"3.0\",\n            \"required_api_version\": \"3.0\",\n            \"features\": [\n            ],\n            \"configuration\": {},\n            \"required_cluster_stack\": []}\n\nif __name__ == \"__main__\":\n    log.log_call_argv()\n    cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())\n    base = os.path.basename(sys.argv[0])\n    if base == \"Plugin.Query\":\n        cmd.query()\n    else:\n        raise xapi.storage.api.plugin.Unimplemented(base)\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nimport os\nimport sys\nimport xapi\nimport xapi.storage.api.plugin\nfrom xapi.storage import log\n\n\nclass Implementation(xapi.storage.api.plugin.Plugin_skeleton):\n\n    def query(self, dbg):\n        return {\n\n\n# ... modified code ... \n\n\n\nif __name__ == \"__main__\":\n    log.log_call_argv()\n    cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation())\n    base = os.path.basename(sys.argv[0])\n    if base == \"Plugin.Query\":\n        cmd.query()\n    else:\n        raise xapi.storage.api.plugin.Unimplemented(base)\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3781,"cells":{"commit":{"kind":"string","value":"fc7db2a55ad3f612ac6ef01cfa57ce03040708a5"},"old_file":{"kind":"string","value":"evelink/__init__.py"},"new_file":{"kind":"string","value":"evelink/__init__.py"},"old_contents":{"kind":"string","value":"\"\"\"EVELink - Python bindings for the EVE API.\"\"\"\n\nimport logging\n\nfrom evelink import account\nfrom evelink import api\nfrom evelink import char\nfrom evelink import constants\nfrom evelink import corp\nfrom evelink import eve\nfrom evelink import map\nfrom evelink import parsing\nfrom evelink import server\n\n# Implement NullHandler because it was only added in Python 2.7+.\nclass NullHandler(logging.Handler):\n    def emit(self, record):\n        pass\n\n# Create a logger, but by default, have it do nothing\n_log = logging.getLogger('evelink')\n_log.addHandler(NullHandler())\n\n__all__ = [\n  \"account\",\n  \"api\",\n  \"char\",\n  \"constants\",\n  \"corp\",\n  \"eve\",\n  \"map\",\n  \"parsing\",\n  \"server\",\n]\n"},"new_contents":{"kind":"string","value":"\"\"\"EVELink - Python bindings for the EVE API.\"\"\"\n\nimport logging\n\nfrom evelink import account\nfrom evelink import api\nfrom evelink import char\nfrom evelink import constants\nfrom evelink import corp\nfrom evelink import eve\nfrom evelink import map\nfrom evelink import server\n\n# Implement NullHandler because it was only added in Python 2.7+.\nclass NullHandler(logging.Handler):\n    def emit(self, record):\n        pass\n\n# Create a logger, but by default, have it do nothing\n_log = logging.getLogger('evelink')\n_log.addHandler(NullHandler())\n\n__all__ = [\n  \"account\",\n  \"api\",\n  \"char\",\n  \"constants\",\n  \"corp\",\n  \"eve\",\n  \"map\",\n  \"parsing\",\n  \"server\",\n]\n"},"subject":{"kind":"string","value":"Remove parsing from public interface"},"message":{"kind":"string","value":"Remove parsing from public interface\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"zigdon/evelink,FashtimeDotCom/evelink,bastianh/evelink,ayust/evelink,Morloth1274/EVE-Online-POCO-manager"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\"\"\"EVELink - Python bindings for the EVE API.\"\"\"\n\nimport logging\n\nfrom evelink import account\nfrom evelink import api\nfrom evelink import char\nfrom evelink import constants\nfrom evelink import corp\nfrom evelink import eve\nfrom evelink import map\nfrom evelink import parsing\nfrom evelink import server\n\n# Implement NullHandler because it was only added in Python 2.7+.\nclass NullHandler(logging.Handler):\n    def emit(self, record):\n        pass\n\n# Create a logger, but by default, have it do nothing\n_log = logging.getLogger('evelink')\n_log.addHandler(NullHandler())\n\n__all__ = [\n  \"account\",\n  \"api\",\n  \"char\",\n  \"constants\",\n  \"corp\",\n  \"eve\",\n  \"map\",\n  \"parsing\",\n  \"server\",\n]\n\n## Instruction:\nRemove parsing from public interface\n\n## Code After:\n\"\"\"EVELink - Python bindings for the EVE API.\"\"\"\n\nimport logging\n\nfrom evelink import account\nfrom evelink import api\nfrom evelink import char\nfrom evelink import constants\nfrom evelink import corp\nfrom evelink import eve\nfrom evelink import map\nfrom evelink import server\n\n# Implement NullHandler because it was only added in Python 2.7+.\nclass NullHandler(logging.Handler):\n    def emit(self, record):\n        pass\n\n# Create a logger, but by default, have 
it do nothing\n_log = logging.getLogger('evelink')\n_log.addHandler(NullHandler())\n\n__all__ = [\n  \"account\",\n  \"api\",\n  \"char\",\n  \"constants\",\n  \"corp\",\n  \"eve\",\n  \"map\",\n  \"parsing\",\n  \"server\",\n]\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom evelink import corp\nfrom evelink import eve\nfrom evelink import map\nfrom evelink import server\n\n# Implement NullHandler because it was only added in Python 2.7+.\n\n\n# ... rest of the code ..."}}},{"rowIdx":3782,"cells":{"commit":{"kind":"string","value":"493637ace6881defedee22971f3bc39fe9a5bd0a"},"old_file":{"kind":"string","value":"freesas/test/__init__.py"},"new_file":{"kind":"string","value":"freesas/test/__init__.py"},"old_contents":{"kind":"string","value":"\n__author__ = \"Jérôme Kieffer\"\n__license__ = \"MIT\"\n__date__ = \"05/09/2017\"\n__copyright__ = \"2015, ESRF\"\n\nimport unittest\nfrom .test_all import suite\n\n\ndef run():\n    runner = unittest.TextTestRunner()\n    return runner.run(suite())\n\n\nif __name__ == '__main__':\n    run()\n"},"new_contents":{"kind":"string","value":"\n__author__ = \"Jérôme Kieffer\"\n__license__ = \"MIT\"\n__date__ = \"15/01/2021\"\n__copyright__ = \"2015-2021, ESRF\"\n\nimport sys\nimport unittest\nfrom .test_all import suite\n\n\ndef run_tests():\n    \"\"\"Run test complete test_suite\"\"\"\n    mysuite = suite()\n    runner = unittest.TextTestRunner()\n    if not runner.run(mysuite).wasSuccessful():\n        print(\"Test suite failed\")\n        return 1\n    else:\n        print(\"Test suite succeeded\")\n        return 0\n\n\nrun = run_tests\n\nif __name__ == '__main__':\n    sys.exit(run_tests())\n"},"subject":{"kind":"string","value":"Make it compatible with Bob"},"message":{"kind":"string","value":"Make it compatible with Bob"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"kif/freesas,kif/freesas,kif/freesas"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\n\n__author__ = \"Jérôme Kieffer\"\n__license__ = \"MIT\"\n__date__ = \"05/09/2017\"\n__copyright__ = \"2015, ESRF\"\n\nimport unittest\nfrom .test_all import suite\n\n\ndef run():\n    runner = unittest.TextTestRunner()\n    return runner.run(suite())\n\n\nif __name__ == '__main__':\n    run()\n\n## Instruction:\nMake it compatible with Bob\n## Code After:\n\n__author__ = \"Jérôme Kieffer\"\n__license__ = \"MIT\"\n__date__ = \"15/01/2021\"\n__copyright__ = \"2015-2021, ESRF\"\n\nimport sys\nimport unittest\nfrom .test_all import suite\n\n\ndef run_tests():\n    \"\"\"Run test complete test_suite\"\"\"\n    mysuite = suite()\n    runner = unittest.TextTestRunner()\n    if not runner.run(mysuite).wasSuccessful():\n        print(\"Test suite failed\")\n        return 1\n    else:\n        print(\"Test suite succeeded\")\n        return 0\n\n\nrun = run_tests\n\nif __name__ == '__main__':\n    sys.exit(run_tests())\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... 
\n\n\n\n__author__ = \"Jérôme Kieffer\"\n__license__ = \"MIT\"\n__date__ = \"15/01/2021\"\n__copyright__ = \"2015-2021, ESRF\"\n\nimport sys\nimport unittest\nfrom .test_all import suite\n\n\ndef run_tests():\n    \"\"\"Run test complete test_suite\"\"\"\n    mysuite = suite()\n    runner = unittest.TextTestRunner()\n    if not runner.run(mysuite).wasSuccessful():\n        print(\"Test suite failed\")\n        return 1\n    else:\n        print(\"Test suite succeeded\")\n        return 0\n\n\nrun = run_tests\n\nif __name__ == '__main__':\n    sys.exit(run_tests())\n\n\n// ... rest of the code ..."}}},{"rowIdx":3783,"cells":{"commit":{"kind":"string","value":"e4a7139aaf7abb3df49079e9fbe150917a0225dc"},"old_file":{"kind":"string","value":"src/main/java/seedu/jimi/commons/events/ui/ShowTaskPanelSectionEvent.java"},"new_file":{"kind":"string","value":"src/main/java/seedu/jimi/commons/events/ui/ShowTaskPanelSectionEvent.java"},"old_contents":{"kind":"string","value":"package seedu.jimi.commons.events.ui;\n\nimport seedu.jimi.commons.events.BaseEvent;\n\n/**\n * Indicates user request to show a section of the taskList panel.\n * @author zexuan\n *\n */\npublic class ShowTaskPanelSectionEvent extends BaseEvent{\n\n    String sectionToDisplay;\n    \n    public ShowTaskPanelSectionEvent(String sectionToDisplay) {\n        this.sectionToDisplay = sectionToDisplay;\n    }\n    \n    @Override\n    public String toString() {\n        return this.getClass().getSimpleName();\n    }\n\n}\n"},"new_contents":{"kind":"string","value":"package seedu.jimi.commons.events.ui;\n\nimport seedu.jimi.commons.events.BaseEvent;\n\n/**\n * Indicates user request to show a section of the taskList panel.\n * @author zexuan\n *\n */\npublic class ShowTaskPanelSectionEvent extends BaseEvent{\n\n    public final String sectionToDisplay;\n    \n    public ShowTaskPanelSectionEvent(String sectionToDisplay) {\n        this.sectionToDisplay = sectionToDisplay;\n    }\n    \n    @Override\n    public String toString() {\n        return this.getClass().getSimpleName();\n    }\n\n}\n"},"subject":{"kind":"string","value":"Modify access level of member var"},"message":{"kind":"string","value":"Modify access level of member var\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"CS2103AUG2016-T09-C2/main,CS2103AUG2016-T09-C2/main"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage seedu.jimi.commons.events.ui;\n\nimport seedu.jimi.commons.events.BaseEvent;\n\n/**\n * Indicates user request to show a section of the taskList panel.\n * @author zexuan\n *\n */\npublic class ShowTaskPanelSectionEvent extends BaseEvent{\n\n    String sectionToDisplay;\n    \n    public ShowTaskPanelSectionEvent(String sectionToDisplay) {\n        this.sectionToDisplay = sectionToDisplay;\n    }\n    \n    @Override\n    public String toString() {\n        return this.getClass().getSimpleName();\n    }\n\n}\n\n## Instruction:\nModify access level of member var\n\n## Code After:\npackage seedu.jimi.commons.events.ui;\n\nimport seedu.jimi.commons.events.BaseEvent;\n\n/**\n * Indicates user request to show a section of the taskList panel.\n * @author zexuan\n *\n */\npublic class ShowTaskPanelSectionEvent extends BaseEvent{\n\n    public final String sectionToDisplay;\n    \n    public ShowTaskPanelSectionEvent(String sectionToDisplay) {\n        this.sectionToDisplay = sectionToDisplay;\n    }\n    \n    @Override\n    public String 
toString() {\n        return this.getClass().getSimpleName();\n    }\n\n}\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n */\npublic class ShowTaskPanelSectionEvent extends BaseEvent{\n\n    public final String sectionToDisplay;\n    \n    public ShowTaskPanelSectionEvent(String sectionToDisplay) {\n        this.sectionToDisplay = sectionToDisplay;\n\n\n ... "}}},{"rowIdx":3784,"cells":{"commit":{"kind":"string","value":"fbadf23356b40c36378cef8f3a9c8b382bce9e32"},"old_file":{"kind":"string","value":"comics/core/admin.py"},"new_file":{"kind":"string","value":"comics/core/admin.py"},"old_contents":{"kind":"string","value":"from django.contrib import admin\n\nfrom comics.core import models\n\n\nclass ComicAdmin(admin.ModelAdmin):\n    list_display = ('slug', 'name', 'language', 'url', 'rights')\n    prepopulated_fields = {\n        'slug': ('name',)\n    }\n\n\nclass ReleaseAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')\n    list_filter = ['pub_date', 'fetched', 'comic']\n    date_hierarchy = 'pub_date'\n    exclude = ('images',)\n\n\nclass ImageAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')\n    list_filter = ['fetched', 'comic']\n    date_hierarchy = 'fetched'\n\n\nadmin.site.register(models.Comic, ComicAdmin)\nadmin.site.register(models.Release, ReleaseAdmin)\nadmin.site.register(models.Image, ImageAdmin)\n"},"new_contents":{"kind":"string","value":"from django.contrib import admin\n\nfrom comics.core import models\n\n\nclass ComicAdmin(admin.ModelAdmin):\n    list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',\n        'end_date', 'active')\n    prepopulated_fields = {\n        'slug': ('name',)\n    }\n\n\nclass ReleaseAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')\n    list_filter = ['pub_date', 'fetched', 'comic']\n    date_hierarchy = 'pub_date'\n    exclude = ('images',)\n\n\nclass ImageAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')\n    list_filter = ['fetched', 'comic']\n    date_hierarchy = 'fetched'\n\n\nadmin.site.register(models.Comic, ComicAdmin)\nadmin.site.register(models.Release, ReleaseAdmin)\nadmin.site.register(models.Image, ImageAdmin)\n"},"subject":{"kind":"string","value":"Include start date, end date, and active flag in comics list"},"message":{"kind":"string","value":"Include start date, end date, and active flag in comics list\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"agpl-3.0"},"repos":{"kind":"string","value":"jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.contrib import admin\n\nfrom comics.core import models\n\n\nclass ComicAdmin(admin.ModelAdmin):\n    list_display = ('slug', 'name', 'language', 'url', 'rights')\n    prepopulated_fields = {\n        'slug': ('name',)\n    }\n\n\nclass ReleaseAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')\n    list_filter = ['pub_date', 'fetched', 'comic']\n    date_hierarchy = 'pub_date'\n    exclude = ('images',)\n\n\nclass ImageAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')\n    list_filter = ['fetched', 'comic']\n    date_hierarchy 
= 'fetched'\n\n\nadmin.site.register(models.Comic, ComicAdmin)\nadmin.site.register(models.Release, ReleaseAdmin)\nadmin.site.register(models.Image, ImageAdmin)\n\n## Instruction:\nInclude start date, end date, and active flag in comics list\n\n## Code After:\nfrom django.contrib import admin\n\nfrom comics.core import models\n\n\nclass ComicAdmin(admin.ModelAdmin):\n    list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',\n        'end_date', 'active')\n    prepopulated_fields = {\n        'slug': ('name',)\n    }\n\n\nclass ReleaseAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'comic', 'pub_date', 'fetched')\n    list_filter = ['pub_date', 'fetched', 'comic']\n    date_hierarchy = 'pub_date'\n    exclude = ('images',)\n\n\nclass ImageAdmin(admin.ModelAdmin):\n    list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text')\n    list_filter = ['fetched', 'comic']\n    date_hierarchy = 'fetched'\n\n\nadmin.site.register(models.Comic, ComicAdmin)\nadmin.site.register(models.Release, ReleaseAdmin)\nadmin.site.register(models.Image, ImageAdmin)\n"},"fuzzy_diff":{"kind":"string","value":"// ... existing code ... \n\n\n\n\nclass ComicAdmin(admin.ModelAdmin):\n    list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date',\n        'end_date', 'active')\n    prepopulated_fields = {\n        'slug': ('name',)\n    }\n\n\n// ... rest of the code ..."}}},{"rowIdx":3785,"cells":{"commit":{"kind":"string","value":"c6a161b5c0fa3d76b09b34dfab8f057e8b10bce2"},"old_file":{"kind":"string","value":"tests/test_extensions.py"},"new_file":{"kind":"string","value":"tests/test_extensions.py"},"old_contents":{"kind":"string","value":"import unittest\n\n\nclass TestExtensions(unittest.TestCase):\n    def test_import_extension(self):\n        import pybel.ext.test\n\n        assert pybel.ext.test.an_extension_function() == 42\n\n    def test_import_extension_2(self):\n        from pybel.ext.test import an_extension_function\n\n        assert an_extension_function() == 42\n\n    def test_import_extension_3(self):\n        from pybel.ext import test\n\n        assert test.an_extension_function() == 42\n"},"new_contents":{"kind":"string","value":"import unittest\n\n\nclass TestExtensions(unittest.TestCase):\n    def test_import_extension(self):\n        import pybel.ext.test\n\n        assert pybel.ext.test.an_extension_function() == 42\n\n    def test_import_extension_2(self):\n        from pybel.ext.test import an_extension_function\n\n        assert an_extension_function() == 42\n\n    def test_import_extension_3(self):\n        from pybel.ext import test\n\n        assert test.an_extension_function() == 42\n\n    def test_import_extension_4(self):\n        with self.assertRaises(ImportError):\n            from pybel.ext import not_an_extension\n"},"subject":{"kind":"string","value":"Add a test for importing a nonexistent extension"},"message":{"kind":"string","value":"Add a test for importing a nonexistent extension\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"pybel/pybel,pybel/pybel,pybel/pybel"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nimport unittest\n\n\nclass TestExtensions(unittest.TestCase):\n    def test_import_extension(self):\n        import pybel.ext.test\n\n        assert pybel.ext.test.an_extension_function() == 42\n\n    def test_import_extension_2(self):\n        from pybel.ext.test import 
an_extension_function\n\n        assert an_extension_function() == 42\n\n    def test_import_extension_3(self):\n        from pybel.ext import test\n\n        assert test.an_extension_function() == 42\n\n## Instruction:\nAdd a test for importing a nonexistent extension\n\n## Code After:\nimport unittest\n\n\nclass TestExtensions(unittest.TestCase):\n    def test_import_extension(self):\n        import pybel.ext.test\n\n        assert pybel.ext.test.an_extension_function() == 42\n\n    def test_import_extension_2(self):\n        from pybel.ext.test import an_extension_function\n\n        assert an_extension_function() == 42\n\n    def test_import_extension_3(self):\n        from pybel.ext import test\n\n        assert test.an_extension_function() == 42\n\n    def test_import_extension_4(self):\n        with self.assertRaises(ImportError):\n            from pybel.ext import not_an_extension\n"},"fuzzy_diff":{"kind":"string","value":" ... \n\n\n        from pybel.ext import test\n\n        assert test.an_extension_function() == 42\n\n    def test_import_extension_4(self):\n        with self.assertRaises(ImportError):\n            from pybel.ext import not_an_extension\n\n\n ... "}}},{"rowIdx":3786,"cells":{"commit":{"kind":"string","value":"bf007267246bd317dc3ccad9f5cf8a9f452b3e0b"},"old_file":{"kind":"string","value":"firecares/utils/__init__.py"},"new_file":{"kind":"string","value":"firecares/utils/__init__.py"},"old_contents":{"kind":"string","value":"from django.core.files.storage import get_storage_class\nfrom storages.backends.s3boto import S3BotoStorage\nfrom PIL import Image\n\n\ndef convert_png_to_jpg(img):\n    \"\"\"\n    Converts a png to a jpg.\n    :param img: Absolute path to the image.\n    :returns: the filename\n    \"\"\"\n    im = Image.open(img)\n    bg = Image.new(\"RGB\", im.size, (255, 255, 255))\n    bg.paste(im, im)\n    filename = img.replace('png', 'jpg')\n    bg.save(filename, quality=85)\n    return filename\n\n\nclass CachedS3BotoStorage(S3BotoStorage):\n    \"\"\"\n    S3 storage backend that saves the files locally, too.\n    \"\"\"\n    def __init__(self, *args, **kwargs):\n        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)\n        self.local_storage = get_storage_class(\n            \"compressor.storage.CompressorFileStorage\")()\n\n    def save(self, name, content):\n        name = super(CachedS3BotoStorage, self).save(name, content)\n        self.local_storage._save(name, content)\n        return name\n\n\ndef dictfetchall(cursor):\n    \"\"\"\n    Returns all rows from a cursor as a dict\n    \"\"\"\n    desc = cursor.description\n    return [\n        dict(zip([col[0] for col in desc], row))\n        for row in cursor.fetchall()\n    ]\n"},"new_contents":{"kind":"string","value":"from django.core.files.storage import get_storage_class\nfrom storages.backends.s3boto import S3BotoStorage\nfrom PIL import Image\n\n\nclass CachedS3BotoStorage(S3BotoStorage):\n    \"\"\"\n    S3 storage backend that saves the files locally, too.\n    \"\"\"\n    def __init__(self, *args, **kwargs):\n        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)\n        self.local_storage = get_storage_class(\n            \"compressor.storage.CompressorFileStorage\")()\n\n    def save(self, name, content):\n        name = super(CachedS3BotoStorage, self).save(name, content)\n        self.local_storage._save(name, content)\n        return name\n\n\ndef dictfetchall(cursor):\n    \"\"\"\n    Returns all rows from a cursor as a dict\n    \"\"\"\n    desc = 
cursor.description\n    return [\n        dict(zip([col[0] for col in desc], row))\n        for row in cursor.fetchall()\n    ]\n"},"subject":{"kind":"string","value":"Remove the unused convert_png_to_jpg method."},"message":{"kind":"string","value":"Remove the unused convert_png_to_jpg method.\n"},"lang":{"kind":"string","value":"Python"},"license":{"kind":"string","value":"mit"},"repos":{"kind":"string","value":"FireCARES/firecares,FireCARES/firecares,meilinger/firecares,meilinger/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares"},"config":{"kind":"string","value":"python"},"content":{"kind":"string","value":"## Code Before:\nfrom django.core.files.storage import get_storage_class\nfrom storages.backends.s3boto import S3BotoStorage\nfrom PIL import Image\n\n\ndef convert_png_to_jpg(img):\n    \"\"\"\n    Converts a png to a jpg.\n    :param img: Absolute path to the image.\n    :returns: the filename\n    \"\"\"\n    im = Image.open(img)\n    bg = Image.new(\"RGB\", im.size, (255, 255, 255))\n    bg.paste(im, im)\n    filename = img.replace('png', 'jpg')\n    bg.save(filename, quality=85)\n    return filename\n\n\nclass CachedS3BotoStorage(S3BotoStorage):\n    \"\"\"\n    S3 storage backend that saves the files locally, too.\n    \"\"\"\n    def __init__(self, *args, **kwargs):\n        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)\n        self.local_storage = get_storage_class(\n            \"compressor.storage.CompressorFileStorage\")()\n\n    def save(self, name, content):\n        name = super(CachedS3BotoStorage, self).save(name, content)\n        self.local_storage._save(name, content)\n        return name\n\n\ndef dictfetchall(cursor):\n    \"\"\"\n    Returns all rows from a cursor as a dict\n    \"\"\"\n    desc = cursor.description\n    return [\n        dict(zip([col[0] for col in desc], row))\n        for row in cursor.fetchall()\n    ]\n\n## Instruction:\nRemove the unused convert_png_to_jpg method.\n\n## Code After:\nfrom django.core.files.storage import get_storage_class\nfrom storages.backends.s3boto import S3BotoStorage\nfrom PIL import Image\n\n\nclass CachedS3BotoStorage(S3BotoStorage):\n    \"\"\"\n    S3 storage backend that saves the files locally, too.\n    \"\"\"\n    def __init__(self, *args, **kwargs):\n        super(CachedS3BotoStorage, self).__init__(*args, **kwargs)\n        self.local_storage = get_storage_class(\n            \"compressor.storage.CompressorFileStorage\")()\n\n    def save(self, name, content):\n        name = super(CachedS3BotoStorage, self).save(name, content)\n        self.local_storage._save(name, content)\n        return name\n\n\ndef dictfetchall(cursor):\n    \"\"\"\n    Returns all rows from a cursor as a dict\n    \"\"\"\n    desc = cursor.description\n    return [\n        dict(zip([col[0] for col in desc], row))\n        for row in cursor.fetchall()\n    ]\n"},"fuzzy_diff":{"kind":"string","value":"# ... existing code ... \n\n\nfrom django.core.files.storage import get_storage_class\nfrom storages.backends.s3boto import S3BotoStorage\nfrom PIL import Image\n\n\nclass CachedS3BotoStorage(S3BotoStorage):\n\n\n# ... 
rest of the code ..."}}},{"rowIdx":3787,"cells":{"commit":{"kind":"string","value":"7476a8d3a70bc0d69a4683afc45e0663640701dc"},"old_file":{"kind":"string","value":"src/main/java/de/flux/playground/deckcompare/dto/Deck.java"},"new_file":{"kind":"string","value":"src/main/java/de/flux/playground/deckcompare/dto/Deck.java"},"old_contents":{"kind":"string","value":"package de.flux.playground.deckcompare.dto;\n\nimport java.util.List;\n\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlAttribute;\nimport javax.xml.bind.annotation.XmlElement;\nimport javax.xml.bind.annotation.XmlRootElement;\n\nimport lombok.Data;\n\n@Data\n@XmlRootElement\n@XmlAccessorType(XmlAccessType.FIELD)\npublic class Deck {\n\n    @XmlAttribute(name = \"game\")\n    private String id;\n    @XmlElement(name = \"section\")\n    private List
sections;\n private String notes;\n\n}\n"},"new_contents":{"kind":"string","value":"package de.flux.playground.deckcompare.dto;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlAttribute;\nimport javax.xml.bind.annotation.XmlElement;\nimport javax.xml.bind.annotation.XmlRootElement;\n\nimport lombok.Data;\n\n@Data\n@XmlRootElement\n@XmlAccessorType(XmlAccessType.FIELD)\npublic class Deck {\n\n private static final int DEFAULT_SECTIONS = 2;\n private static final int CARDS_SECTION = 1;\n\n @XmlAttribute(name = \"game\")\n private String id;\n @XmlElement(name = \"section\")\n private List
sections;\n private String notes;\n\n public List getCards() {\n List cards = new ArrayList();\n\n if (sections.size() > DEFAULT_SECTIONS) {\n cards = sections.get(CARDS_SECTION).getCards();\n }\n\n return cards;\n }\n\n}\n"},"subject":{"kind":"string","value":"Add method to extract playable cards form deck"},"message":{"kind":"string","value":"Add method to extract playable cards form deck\n\nWithout having to take the detour over the sections\n"},"lang":{"kind":"string","value":"Java"},"license":{"kind":"string","value":"apache-2.0"},"repos":{"kind":"string","value":"JLengenfeld/deckcompare,JLengenfeld/deckcompare,JLengenfeld/deckcompare"},"config":{"kind":"string","value":"java"},"content":{"kind":"string","value":"## Code Before:\npackage de.flux.playground.deckcompare.dto;\n\nimport java.util.List;\n\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlAttribute;\nimport javax.xml.bind.annotation.XmlElement;\nimport javax.xml.bind.annotation.XmlRootElement;\n\nimport lombok.Data;\n\n@Data\n@XmlRootElement\n@XmlAccessorType(XmlAccessType.FIELD)\npublic class Deck {\n\n @XmlAttribute(name = \"game\")\n private String id;\n @XmlElement(name = \"section\")\n private List
sections;\n private String notes;\n\n}\n\n## Instruction:\nAdd method to extract playable cards form deck\n\nWithout having to take the detour over the sections\n\n## Code After:\npackage de.flux.playground.deckcompare.dto;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\nimport javax.xml.bind.annotation.XmlAccessType;\nimport javax.xml.bind.annotation.XmlAccessorType;\nimport javax.xml.bind.annotation.XmlAttribute;\nimport javax.xml.bind.annotation.XmlElement;\nimport javax.xml.bind.annotation.XmlRootElement;\n\nimport lombok.Data;\n\n@Data\n@XmlRootElement\n@XmlAccessorType(XmlAccessType.FIELD)\npublic class Deck {\n\n private static final int DEFAULT_SECTIONS = 2;\n private static final int CARDS_SECTION = 1;\n\n @XmlAttribute(name = \"game\")\n private String id;\n @XmlElement(name = \"section\")\n private List
Rows shown on this page:

(partial record, header fields cut off) de.flux.playground.deckcompare.dto.Deck, a JAXB-annotated Java DTO whose getCards() accessor returns the CARDS_SECTION cards only when more than DEFAULT_SECTIONS sections are present

commit f943aa57d6ee462146ff0ab2a091c406d009acce | polyaxon/scheduler/spawners/templates/services/default_env_vars.py
  subject: Add api url to default env vars | lang: Python | license: apache-2.0 | config: python
  repos: polyaxon/polyaxon

commit 1f3730ac4d531ca0d582a8b8bded871acb409847 | backend/api-server/warehaus_api/events/models.py
  subject: Fix api-server events not saving the user ID | lang: Python | license: agpl-3.0 | config: python
  repos: labsome/labsome, warehaus/warehaus

commit 6108ec5c8790a2cf5244b1c8e707474cd142c05d | src/main/java/org/hildan/fx/components/ValidatingTextFieldListCell.java
  subject: Fix ValidatedTextFieldListCell to use the actual edited text | lang: Java | license: mit | config: java
  repos: joffrey-bion/fx-log

commit 234df393c438fdf729dc050d20084e1fe1a4c2ee | backend/mcapi/mcdir.py
  subject: Change directory where data is written to. | lang: Python | license: mit | config: python
  repos: materials-commons/materialscommons.org

commit 6f464e422befe22e56bb759a7ac7ff52a353c6d9 | accountant/functional_tests/test_layout_and_styling.py
  subject: Test is loaded CSS is applied | lang: Python | license: mit | config: python
  repos: XeryusTC/18xx-accountant

commit a077a5b7731e7d609b5c3adc8f8176ad79053f17 | rmake/lib/twisted_extras/tools.py
  subject: Fix Serializer locking bug that caused it to skip calls it should have made | lang: Python | license: apache-2.0 | config: python
  repos: sassoftware/rmake3

commit d45391429f01d5d4ea22e28bef39a2bb419df04f | djangae/apps.py
  subject: Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes | lang: Python | license: bsd-3-clause | config: python
  repos: potatolondon/djangae, grzes/djangae, kirberich/djangae

commit 99496d97f3e00284840d2127556bba0e21d1a99e | frappe/tests/test_commands.py
  subject: Add tests for bench execute | lang: Python | license: mit | config: python
  repos: frappe/frappe, saurabh6790/frappe, StrellaGroup/frappe, adityahase/frappe, mhbu50/frappe, yashodhank/frappe, almeidapaulopt/frappe

commit 3db3ed080c6a372188f3a7366b7bb001c3829b4f | src/main/java/persistence/util/TransactionUtil.java
  subject: Set thread local the entity manager in validatorfactory also in doTransaction with already created EntityManagers | lang: Java | license: apache-2.0 | config: java
  repos: lipido/si1415_persistence

commit 1768207c57b66812931d2586c5544c9b74446918 | peering/management/commands/update_peering_session_states.py
  subject: Fix command polling sessions for IX. | lang: Python | license: apache-2.0 | config: python
  repos: respawner/peering-manager

commit 46231cf5938f090521f7d65c881f0df8b6e34511 | app/src/main/java/mozilla/org/webmaker/activity/Tinker.java
  subject: Use plum and shadow_plum from colors.xml | lang: Java | license: mpl-2.0 | config: java
  repos: mozilla/webmaker-android, adamlofting/webmaker-android, k88hudson/webmaker-android, alicoding/webmaker-android, alanmoo/webmaker-android, bolaram/webmaker-android, j796160836/webmaker-android, gvn/webmaker-android, rodmoreno/webmaker-android

commit 09c3c511687de8888180577fa66f4ca51f4bc237 | taggit_autosuggest_select2/views.py
  subject: Remove deprecated django json shim | lang: Python | license: mit | config: python
  repos: iris-edu/django-taggit-autosuggest-select2, iris-edu-int/django-taggit-autosuggest-select2, adam-iris/django-taggit-autosuggest-select2
Dataset columns:

| column | dtype | stats |
| --- | --- | --- |
| commit | stringlengths | 40-40 |
| old_file | stringlengths | 4-234 |
| new_file | stringlengths | 4-234 |
| old_contents | stringlengths | 10-3.01k |
| new_contents | stringlengths | 19-3.38k |
| subject | stringlengths | 16-736 |
| message | stringlengths | 17-2.63k |
| lang | stringclasses | 4 values |
| license | stringclasses | 13 values |
| repos | stringlengths | 5-82.6k |
| config | stringclasses | 4 values |
| content | stringlengths | 134-4.41k |
| fuzzy_diff | stringlengths | 29-3.44k |
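Read programmatically, the column listing above maps one-to-one onto the row fields shown on this page. A minimal sketch of loading and inspecting such a dataset with the Hugging Face `datasets` library follows; the repository id and the `train` split name are assumptions, since the exact id is not part of this listing.

```python
# Sketch only: the dataset path and split below are placeholders, not values taken
# from this page. Substitute the real repository id before running.
from datasets import load_dataset

ds = load_dataset("your-namespace/commit-edit-dataset", split="train")  # hypothetical id

row = ds[0]

# Short metadata columns from the schema table above.
for key in ("commit", "old_file", "new_file", "subject", "lang", "license", "config"):
    print(f"{key}: {row[key]}")

# The long text columns carry the file before and after the commit, the combined
# prompt-style "content" field, and the fuzzy diff.
for key in ("old_contents", "new_contents", "content", "fuzzy_diff"):
    print(f"{key}: {len(row[key])} chars")
```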
Sample row:

commit: 2986a821162504bf0ef38eff95985a734b2dffde
old_file: ktor-client/ktor-client-core/common/src/io/ktor/client/features/HttpClientFeature.kt
new_file: ktor-client/ktor-client-core/common/src/io/ktor/client/features/HttpClientFeature.kt
old_contents:
/*
 * Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license.
 */

package io.ktor.client.features

import io.ktor.client.*
import io.ktor.util.*

internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry")

/**
 * Base interface representing a [HttpClient] feature.
 */
interface HttpClientFeature<out TConfig : Any, TFeature : Any> {
    /**
     * The [AttributeKey] for this feature.
     */
    val key: AttributeKey<TFeature>

    /**
     * Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver.
     */
    fun prepare(block: TConfig.() -> Unit = {}): TFeature

    /**
     * Installs the [feature] class for a [HttpClient] defined at [scope].
     */
    fun install(feature: TFeature, scope: HttpClient)
}

/**
 * Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed.
 */
fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? =
    attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
/* * Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license. */ package io.ktor.client.features import io.ktor.client.* import io.ktor.util.* import kotlin.native.concurrent.* @SharedImmutable internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry") /** * Base interface representing a [HttpClient] feature. */ interface HttpClientFeature<out TConfig : Any, TFeature : Any> { /** * The [AttributeKey] for this feature. */ val key: AttributeKey<TFeature> /** * Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver. */ fun prepare(block: TConfig.() -> Unit = {}): TFeature /** * Installs the [feature] class for a [HttpClient] defined at [scope]. */ fun install(feature: TFeature, scope: HttpClient) } /** * Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed. */ fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? = attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
Make attribute key available on native
Make attribute key available on native
Kotlin
apache-2.0
ktorio/ktor,ktorio/ktor,ktorio/ktor,ktorio/ktor
kotlin
## Code Before: /* * Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license. */ package io.ktor.client.features import io.ktor.client.* import io.ktor.util.* internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry") /** * Base interface representing a [HttpClient] feature. */ interface HttpClientFeature<out TConfig : Any, TFeature : Any> { /** * The [AttributeKey] for this feature. */ val key: AttributeKey<TFeature> /** * Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver. */ fun prepare(block: TConfig.() -> Unit = {}): TFeature /** * Installs the [feature] class for a [HttpClient] defined at [scope]. */ fun install(feature: TFeature, scope: HttpClient) } /** * Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed. */ fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? = attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key) ## Instruction: Make attribute key available on native ## Code After: /* * Copyright 2014-2019 JetBrains s.r.o and contributors. Use of this source code is governed by the Apache 2.0 license. */ package io.ktor.client.features import io.ktor.client.* import io.ktor.util.* import kotlin.native.concurrent.* @SharedImmutable internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry") /** * Base interface representing a [HttpClient] feature. */ interface HttpClientFeature<out TConfig : Any, TFeature : Any> { /** * The [AttributeKey] for this feature. */ val key: AttributeKey<TFeature> /** * Builds a [TFeature] by calling the [block] with a [TConfig] config instance as receiver. */ fun prepare(block: TConfig.() -> Unit = {}): TFeature /** * Installs the [feature] class for a [HttpClient] defined at [scope]. */ fun install(feature: TFeature, scope: HttpClient) } /** * Try to get a [feature] installed in this client. Returns `null` if the feature was not previously installed. */ fun <B : Any, F : Any> HttpClient.feature(feature: HttpClientFeature<B, F>): F? = attributes.getOrNull(FEATURE_INSTALLED_LIST)?.getOrNull(feature.key)
# ... existing code ... import io.ktor.client.* import io.ktor.util.* import kotlin.native.concurrent.* @SharedImmutable internal val FEATURE_INSTALLED_LIST = AttributeKey<Attributes>("ApplicationFeatureRegistry") /** # ... rest of the code ...
9f5ad5b12c1602bad56c1fe3ee91cb86c02e4449
src/main/java/com/github/lookout/metrics/agent/ReportAgent.java
src/main/java/com/github/lookout/metrics/agent/ReportAgent.java
package com.github.lookout.metrics.agent; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import java.io.IOException; import java.lang.instrument.Instrumentation; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.concurrent.TimeUnit; public class ReportAgent { public static void premain(final String agentArgs, final Instrumentation inst) throws IOException { String host; try { host = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { host = "unknown-host"; } for (final String reportingHostPort : agentArgs.split(",")) { final HostPortInterval hostPortInterval = new HostPortInterval(reportingHostPort); final StatsDClient client = new NonBlockingStatsDClient(host, hostPortInterval.getHost(), hostPortInterval.getPort()); final StatsdReporter reporter = new StatsdReporter(hostPortInterval, client); reporter.start(hostPortInterval.getInterval(), TimeUnit.SECONDS); } } public static void main(final String[] args) { try { Thread.sleep(10000); } catch (InterruptedException e) { // impossible } } }
package com.github.lookout.metrics.agent; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import java.io.IOException; import java.lang.instrument.Instrumentation; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.concurrent.TimeUnit; public class ReportAgent { public static void premain(final String agentArgs, final Instrumentation inst) throws IOException { String host; try { host = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { host = "unknown-host"; } final String[] reportingHostPorts = (agentArgs != null) ? agentArgs.split(",") : new String[]{null}; for (final String reportingHostPort : reportingHostPorts) { final HostPortInterval hostPortInterval = new HostPortInterval(reportingHostPort); final StatsDClient client = new NonBlockingStatsDClient(host, hostPortInterval.getHost(), hostPortInterval.getPort()); final StatsdReporter reporter = new StatsdReporter(hostPortInterval, client); reporter.start(hostPortInterval.getInterval(), TimeUnit.SECONDS); } } public static void main(final String[] args) { try { Thread.sleep(10000); } catch (InterruptedException e) { // impossible } } }
Fix the agent not starting if args are not supplied
Fix the agent not starting if args are not supplied
Java
bsd-2-clause
petere/cassandra-statsd-agent,lookout/cassandra-statsd-agent
java
## Code Before: package com.github.lookout.metrics.agent; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import java.io.IOException; import java.lang.instrument.Instrumentation; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.concurrent.TimeUnit; public class ReportAgent { public static void premain(final String agentArgs, final Instrumentation inst) throws IOException { String host; try { host = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { host = "unknown-host"; } for (final String reportingHostPort : agentArgs.split(",")) { final HostPortInterval hostPortInterval = new HostPortInterval(reportingHostPort); final StatsDClient client = new NonBlockingStatsDClient(host, hostPortInterval.getHost(), hostPortInterval.getPort()); final StatsdReporter reporter = new StatsdReporter(hostPortInterval, client); reporter.start(hostPortInterval.getInterval(), TimeUnit.SECONDS); } } public static void main(final String[] args) { try { Thread.sleep(10000); } catch (InterruptedException e) { // impossible } } } ## Instruction: Fix the agent not starting if args are not supplied ## Code After: package com.github.lookout.metrics.agent; import com.timgroup.statsd.NonBlockingStatsDClient; import com.timgroup.statsd.StatsDClient; import java.io.IOException; import java.lang.instrument.Instrumentation; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.concurrent.TimeUnit; public class ReportAgent { public static void premain(final String agentArgs, final Instrumentation inst) throws IOException { String host; try { host = InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException e) { host = "unknown-host"; } final String[] reportingHostPorts = (agentArgs != null) ? agentArgs.split(",") : new String[]{null}; for (final String reportingHostPort : reportingHostPorts) { final HostPortInterval hostPortInterval = new HostPortInterval(reportingHostPort); final StatsDClient client = new NonBlockingStatsDClient(host, hostPortInterval.getHost(), hostPortInterval.getPort()); final StatsdReporter reporter = new StatsdReporter(hostPortInterval, client); reporter.start(hostPortInterval.getInterval(), TimeUnit.SECONDS); } } public static void main(final String[] args) { try { Thread.sleep(10000); } catch (InterruptedException e) { // impossible } } }
# ... existing code ... host = "unknown-host"; } final String[] reportingHostPorts = (agentArgs != null) ? agentArgs.split(",") : new String[]{null}; for (final String reportingHostPort : reportingHostPorts) { final HostPortInterval hostPortInterval = new HostPortInterval(reportingHostPort); final StatsDClient client = new NonBlockingStatsDClient(host, hostPortInterval.getHost(), hostPortInterval.getPort()); final StatsdReporter reporter = new StatsdReporter(hostPortInterval, client); # ... rest of the code ...
51757c8a893640e2a9fa3a7b9f8e617b22e6db87
test/test_api.py
test/test_api.py
import unittest import appdirs class Test_AppDir(unittest.TestCase): def test_metadata(self): self.assertTrue(hasattr(appdirs, "__version__")) self.assertTrue(hasattr(appdirs, "__version_info__")) def test_helpers(self): self.assertTrue(isinstance( appdirs.user_data_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.site_data_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.user_cache_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.user_log_dir('MyApp', 'MyCompany'), str)) def test_dirs(self): dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0') self.assertTrue(isinstance(dirs.user_data_dir, str)) self.assertTrue(isinstance(dirs.site_data_dir, str)) self.assertTrue(isinstance(dirs.user_cache_dir, str)) self.assertTrue(isinstance(dirs.user_log_dir, str)) if __name__=="__main__": unittest.main()
import unittest import appdirs class Test_AppDir(unittest.TestCase): def test_metadata(self): self.assertTrue(hasattr(appdirs, "__version__")) self.assertTrue(hasattr(appdirs, "__version_info__")) def test_helpers(self): self.assertIsInstance( appdirs.user_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.site_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_cache_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_log_dir('MyApp', 'MyCompany'), str) def test_dirs(self): dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0') self.assertIsInstance(dirs.user_data_dir, str) self.assertIsInstance(dirs.site_data_dir, str) self.assertIsInstance(dirs.user_cache_dir, str) self.assertIsInstance(dirs.user_log_dir, str) if __name__=="__main__": unittest.main()
Use assertIsInstance() instead of assertTrue(isinstance()).
Use assertIsInstance() instead of assertTrue(isinstance()).
Python
mit
platformdirs/platformdirs
python
## Code Before: import unittest import appdirs class Test_AppDir(unittest.TestCase): def test_metadata(self): self.assertTrue(hasattr(appdirs, "__version__")) self.assertTrue(hasattr(appdirs, "__version_info__")) def test_helpers(self): self.assertTrue(isinstance( appdirs.user_data_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.site_data_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.user_cache_dir('MyApp', 'MyCompany'), str)) self.assertTrue(isinstance( appdirs.user_log_dir('MyApp', 'MyCompany'), str)) def test_dirs(self): dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0') self.assertTrue(isinstance(dirs.user_data_dir, str)) self.assertTrue(isinstance(dirs.site_data_dir, str)) self.assertTrue(isinstance(dirs.user_cache_dir, str)) self.assertTrue(isinstance(dirs.user_log_dir, str)) if __name__=="__main__": unittest.main() ## Instruction: Use assertIsInstance() instead of assertTrue(isinstance()). ## Code After: import unittest import appdirs class Test_AppDir(unittest.TestCase): def test_metadata(self): self.assertTrue(hasattr(appdirs, "__version__")) self.assertTrue(hasattr(appdirs, "__version_info__")) def test_helpers(self): self.assertIsInstance( appdirs.user_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.site_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_cache_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_log_dir('MyApp', 'MyCompany'), str) def test_dirs(self): dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0') self.assertIsInstance(dirs.user_data_dir, str) self.assertIsInstance(dirs.site_data_dir, str) self.assertIsInstance(dirs.user_cache_dir, str) self.assertIsInstance(dirs.user_log_dir, str) if __name__=="__main__": unittest.main()
# ... existing code ... self.assertTrue(hasattr(appdirs, "__version_info__")) def test_helpers(self): self.assertIsInstance( appdirs.user_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.site_data_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_cache_dir('MyApp', 'MyCompany'), str) self.assertIsInstance( appdirs.user_log_dir('MyApp', 'MyCompany'), str) def test_dirs(self): dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0') self.assertIsInstance(dirs.user_data_dir, str) self.assertIsInstance(dirs.site_data_dir, str) self.assertIsInstance(dirs.user_cache_dir, str) self.assertIsInstance(dirs.user_log_dir, str) if __name__=="__main__": unittest.main() # ... rest of the code ...
f292cfa783bcc30c2625b340ad763db2723ce056
test/test_db.py
test/test_db.py
from piper.db import DbCLI import mock class DbCLIBase(object): def setup_method(self, method): self.cli = DbCLI(mock.Mock()) self.ns = mock.Mock() self.config = mock.Mock() class TestDbCLIRun(DbCLIBase): def test_plain_run(self): self.cli.cls.init = mock.Mock() ret = self.cli.run(self.ns, self.config) assert ret == 0 self.cli.cls.init.assert_called_once_with(self.ns, self.config)
from piper.db import DbCLI from piper.db import DatabaseBase import mock import pytest class DbCLIBase(object): def setup_method(self, method): self.cli = DbCLI(mock.Mock()) self.ns = mock.Mock() self.config = mock.Mock() class TestDbCLIRun(DbCLIBase): def test_plain_run(self): self.cli.cls.init = mock.Mock() ret = self.cli.run(self.ns, self.config) assert ret == 0 self.cli.cls.init.assert_called_once_with(self.ns, self.config) class TestDatabaseBaseInit(object): def setup_method(self, method): self.db = DatabaseBase() self.ns = mock.Mock() self.config = mock.Mock() def test_raises_not_implemented_error(self): with pytest.raises(NotImplementedError): self.db.init(self.ns, self.config)
Add tests for DatabaseBase abstraction
Add tests for DatabaseBase abstraction
Python
mit
thiderman/piper
python
## Code Before: from piper.db import DbCLI import mock class DbCLIBase(object): def setup_method(self, method): self.cli = DbCLI(mock.Mock()) self.ns = mock.Mock() self.config = mock.Mock() class TestDbCLIRun(DbCLIBase): def test_plain_run(self): self.cli.cls.init = mock.Mock() ret = self.cli.run(self.ns, self.config) assert ret == 0 self.cli.cls.init.assert_called_once_with(self.ns, self.config) ## Instruction: Add tests for DatabaseBase abstraction ## Code After: from piper.db import DbCLI from piper.db import DatabaseBase import mock import pytest class DbCLIBase(object): def setup_method(self, method): self.cli = DbCLI(mock.Mock()) self.ns = mock.Mock() self.config = mock.Mock() class TestDbCLIRun(DbCLIBase): def test_plain_run(self): self.cli.cls.init = mock.Mock() ret = self.cli.run(self.ns, self.config) assert ret == 0 self.cli.cls.init.assert_called_once_with(self.ns, self.config) class TestDatabaseBaseInit(object): def setup_method(self, method): self.db = DatabaseBase() self.ns = mock.Mock() self.config = mock.Mock() def test_raises_not_implemented_error(self): with pytest.raises(NotImplementedError): self.db.init(self.ns, self.config)
// ... existing code ... from piper.db import DbCLI from piper.db import DatabaseBase import mock import pytest class DbCLIBase(object): // ... modified code ... assert ret == 0 self.cli.cls.init.assert_called_once_with(self.ns, self.config) class TestDatabaseBaseInit(object): def setup_method(self, method): self.db = DatabaseBase() self.ns = mock.Mock() self.config = mock.Mock() def test_raises_not_implemented_error(self): with pytest.raises(NotImplementedError): self.db.init(self.ns, self.config) // ... rest of the code ...
eb2cc64f5620d51bb571a93e2ba5bb5252215cc7
src/main/kotlin/com/ridi/books/helper/text/TimeHelper.kt
src/main/kotlin/com/ridi/books/helper/text/TimeHelper.kt
package com.ridi.books.helper.text import java.util.Calendar fun Long.elapsedTimeString(): String { var elapsed = System.currentTimeMillis() - this val second = 1000 val minute = second * 60 val hour = minute * 60 val day = (hour * 24).toLong() val week = day * 7 val suffix: String if (elapsed / week > 3) { val cal = Calendar.getInstance() cal.timeInMillis = this return "${cal.get(Calendar.YEAR)}.${cal.get(Calendar.MONDAY) + 1}.${cal.get(Calendar.DAY_OF_MONTH)}" } else if (elapsed / week > 0) { suffix = "주 전" elapsed /= week } else if (elapsed / day > 0) { suffix = "일 전" elapsed /= day } else if (elapsed / hour > 0) { suffix = "시간 전" elapsed /= hour } else if (elapsed / minute > 0) { suffix = "분 전" elapsed /= minute } else if (elapsed / second > 10) { suffix = "초 전" elapsed /= second } else { return "방금 전" } return "$elapsed$suffix" }
package com.ridi.books.helper.text fun Long.elapsedTimeString(tooMuchElapsed: String): String { var elapsed = System.currentTimeMillis() - this val second = 1000 val minute = second * 60 val hour = minute * 60 val day = (hour * 24).toLong() val week = day * 7 val suffix: String if (elapsed / week > 3) { return tooMuchElapsed } else if (elapsed / week > 0) { suffix = "주 전" elapsed /= week } else if (elapsed / day > 0) { suffix = "일 전" elapsed /= day } else if (elapsed / hour > 0) { suffix = "시간 전" elapsed /= hour } else if (elapsed / minute > 0) { suffix = "분 전" elapsed /= minute } else if (elapsed / second > 10) { suffix = "초 전" elapsed /= second } else { return "방금 전" } return "$elapsed$suffix" }
Update elapsedTimeString - add ‘tooMuchElasped’ parameter that returned if more than 3 weeks have elapsed
Update elapsedTimeString - add ‘tooMuchElasped’ parameter that returned if more than 3 weeks have elapsed
Kotlin
mit
ridibooks/rbhelper-android,ridibooks/rbhelper-android
kotlin
## Code Before: package com.ridi.books.helper.text import java.util.Calendar fun Long.elapsedTimeString(): String { var elapsed = System.currentTimeMillis() - this val second = 1000 val minute = second * 60 val hour = minute * 60 val day = (hour * 24).toLong() val week = day * 7 val suffix: String if (elapsed / week > 3) { val cal = Calendar.getInstance() cal.timeInMillis = this return "${cal.get(Calendar.YEAR)}.${cal.get(Calendar.MONDAY) + 1}.${cal.get(Calendar.DAY_OF_MONTH)}" } else if (elapsed / week > 0) { suffix = "주 전" elapsed /= week } else if (elapsed / day > 0) { suffix = "일 전" elapsed /= day } else if (elapsed / hour > 0) { suffix = "시간 전" elapsed /= hour } else if (elapsed / minute > 0) { suffix = "분 전" elapsed /= minute } else if (elapsed / second > 10) { suffix = "초 전" elapsed /= second } else { return "방금 전" } return "$elapsed$suffix" } ## Instruction: Update elapsedTimeString - add ‘tooMuchElasped’ parameter that returned if more than 3 weeks have elapsed ## Code After: package com.ridi.books.helper.text fun Long.elapsedTimeString(tooMuchElapsed: String): String { var elapsed = System.currentTimeMillis() - this val second = 1000 val minute = second * 60 val hour = minute * 60 val day = (hour * 24).toLong() val week = day * 7 val suffix: String if (elapsed / week > 3) { return tooMuchElapsed } else if (elapsed / week > 0) { suffix = "주 전" elapsed /= week } else if (elapsed / day > 0) { suffix = "일 전" elapsed /= day } else if (elapsed / hour > 0) { suffix = "시간 전" elapsed /= hour } else if (elapsed / minute > 0) { suffix = "분 전" elapsed /= minute } else if (elapsed / second > 10) { suffix = "초 전" elapsed /= second } else { return "방금 전" } return "$elapsed$suffix" }
... package com.ridi.books.helper.text fun Long.elapsedTimeString(tooMuchElapsed: String): String { var elapsed = System.currentTimeMillis() - this val second = 1000 val minute = second * 60 ... val suffix: String if (elapsed / week > 3) { return tooMuchElapsed } else if (elapsed / week > 0) { suffix = "주 전" elapsed /= week ...
aed18a3f9cbaf1eae1d7066b438437446513d912
sphinxcontrib/traceables/__init__.py
sphinxcontrib/traceables/__init__.py
import infrastructure import display import traceables import matrix import graph # ========================================================================== # Setup and register extension def setup(app): # Perform import within this function to avoid an import circle. from sphinxcontrib import traceables # Allow extension parts to set themselves up. traceables.infrastructure.setup(app) traceables.traceables.setup(app) traceables.matrix.setup(app) traceables.graph.setup(app) # Register business logic of extension parts. This is done explicitly # here to ensure correct ordering during processing. traceables.infrastructure.ProcessorManager.register_processor_classes([ traceables.traceables.RelationshipsProcessor, traceables.display.TraceableDisplayProcessor, traceables.traceables.XrefProcessor, traceables.matrix.ListProcessor, traceables.matrix.MatrixProcessor, traceables.graph.GraphProcessor, ]) return {"version": "0.0"}
import infrastructure import display import traceables import matrix import graph # ========================================================================== # Setup and register extension def setup(app): # Perform import within this function to avoid an import circle. from sphinxcontrib import traceables # Allow extension parts to set themselves up. traceables.infrastructure.setup(app) traceables.display.setup(app) traceables.traceables.setup(app) traceables.matrix.setup(app) traceables.graph.setup(app) # Register business logic of extension parts. This is done explicitly # here to ensure correct ordering during processing. traceables.infrastructure.ProcessorManager.register_processor_classes([ traceables.traceables.RelationshipsProcessor, traceables.display.TraceableDisplayProcessor, traceables.traceables.XrefProcessor, traceables.matrix.ListProcessor, traceables.matrix.MatrixProcessor, traceables.graph.GraphProcessor, ]) return {"version": "0.0"}
Fix missing call to display.setup()
Fix missing call to display.setup()
Python
apache-2.0
t4ngo/sphinxcontrib-traceables
python
## Code Before: import infrastructure import display import traceables import matrix import graph # ========================================================================== # Setup and register extension def setup(app): # Perform import within this function to avoid an import circle. from sphinxcontrib import traceables # Allow extension parts to set themselves up. traceables.infrastructure.setup(app) traceables.traceables.setup(app) traceables.matrix.setup(app) traceables.graph.setup(app) # Register business logic of extension parts. This is done explicitly # here to ensure correct ordering during processing. traceables.infrastructure.ProcessorManager.register_processor_classes([ traceables.traceables.RelationshipsProcessor, traceables.display.TraceableDisplayProcessor, traceables.traceables.XrefProcessor, traceables.matrix.ListProcessor, traceables.matrix.MatrixProcessor, traceables.graph.GraphProcessor, ]) return {"version": "0.0"} ## Instruction: Fix missing call to display.setup() ## Code After: import infrastructure import display import traceables import matrix import graph # ========================================================================== # Setup and register extension def setup(app): # Perform import within this function to avoid an import circle. from sphinxcontrib import traceables # Allow extension parts to set themselves up. traceables.infrastructure.setup(app) traceables.display.setup(app) traceables.traceables.setup(app) traceables.matrix.setup(app) traceables.graph.setup(app) # Register business logic of extension parts. This is done explicitly # here to ensure correct ordering during processing. traceables.infrastructure.ProcessorManager.register_processor_classes([ traceables.traceables.RelationshipsProcessor, traceables.display.TraceableDisplayProcessor, traceables.traceables.XrefProcessor, traceables.matrix.ListProcessor, traceables.matrix.MatrixProcessor, traceables.graph.GraphProcessor, ]) return {"version": "0.0"}
// ... existing code ... # Allow extension parts to set themselves up. traceables.infrastructure.setup(app) traceables.display.setup(app) traceables.traceables.setup(app) traceables.matrix.setup(app) traceables.graph.setup(app) // ... rest of the code ...
8befea283830f76dfa41cfd10d7eb916c68f7ef9
intern/views.py
intern/views.py
from django.contrib.auth.decorators import login_required from django.shortcuts import render from filer.models import File from filer.models import Folder @login_required def documents(request): files = File.objects.all() folders = Folder.objects.all() #print(files[0]) return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
from django.contrib.auth.decorators import login_required from django.shortcuts import render from filer.models import File from filer.models import Folder @login_required def documents(request): files = File.objects.all().order_by("-modified_at") folders = Folder.objects.all() #print(files[0]) return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
Sort files by last modification
Sort files by last modification
Python
mit
n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb,n2o/dpb
python
## Code Before: from django.contrib.auth.decorators import login_required from django.shortcuts import render from filer.models import File from filer.models import Folder @login_required def documents(request): files = File.objects.all() folders = Folder.objects.all() #print(files[0]) return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) ## Instruction: Sort files by last modification ## Code After: from django.contrib.auth.decorators import login_required from django.shortcuts import render from filer.models import File from filer.models import Folder @login_required def documents(request): files = File.objects.all().order_by("-modified_at") folders = Folder.objects.all() #print(files[0]) return render(request, 'intern/documents.html', {'files': files, 'folders': folders})
... @login_required def documents(request): files = File.objects.all().order_by("-modified_at") folders = Folder.objects.all() #print(files[0]) return render(request, 'intern/documents.html', {'files': files, 'folders': folders}) ...
7b1773d5c3fa07899ad9d56d4ac488c1c2e2014e
dope_cherry.py
dope_cherry.py
from dope import app import cherrypy # graft to tree root cherrypy.tree.graft(app) # configure cherrypy.config.update({ 'server.socket_port': 80, 'server.socket_host': '0.0.0.0', 'run_as_user': 'nobody', 'run_as_group': 'nogroup', }) cherrypy.config.update('dope_cherry.cfg') # drop priviledges cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
from dope import app import cherrypy # graft to tree root cherrypy.tree.graft(app) # configure cherrypy.config.update({ 'server.socket_port': 80, 'server.socket_host': '0.0.0.0', 'server.max_request_body_size': 0, # unlimited 'run_as_user': 'nobody', 'run_as_group': 'nogroup', }) cherrypy.config.update('dope_cherry.cfg') # drop priviledges cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
Set server.max_request_body_size in cherrypy settings to allow more then 100M uploads.
Set server.max_request_body_size in cherrypy settings to allow more then 100M uploads.
Python
mit
mbr/dope,mbr/dope
python
## Code Before: from dope import app import cherrypy # graft to tree root cherrypy.tree.graft(app) # configure cherrypy.config.update({ 'server.socket_port': 80, 'server.socket_host': '0.0.0.0', 'run_as_user': 'nobody', 'run_as_group': 'nogroup', }) cherrypy.config.update('dope_cherry.cfg') # drop priviledges cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe() ## Instruction: Set server.max_request_body_size in cherrypy settings to allow more then 100M uploads. ## Code After: from dope import app import cherrypy # graft to tree root cherrypy.tree.graft(app) # configure cherrypy.config.update({ 'server.socket_port': 80, 'server.socket_host': '0.0.0.0', 'server.max_request_body_size': 0, # unlimited 'run_as_user': 'nobody', 'run_as_group': 'nogroup', }) cherrypy.config.update('dope_cherry.cfg') # drop priviledges cherrypy.process.plugins.DropPrivileges(cherrypy.engine, uid = cherrypy.config['run_as_user'], gid = cherrypy.config['run_as_group']).subscribe()
// ... existing code ... cherrypy.config.update({ 'server.socket_port': 80, 'server.socket_host': '0.0.0.0', 'server.max_request_body_size': 0, # unlimited 'run_as_user': 'nobody', 'run_as_group': 'nogroup', }) // ... rest of the code ...
185dcb9db26bd3dc5f76faebb4d56c7abb87f87f
test/parseJaguar.py
test/parseJaguar.py
import os from cclib.parser import Jaguar os.chdir(os.path.join("..","data","Jaguar","basicJaguar")) os.chdir("eg01") for file in ["dvb_gopt.out"]: t = Jaguar(file) t.parse() print t.moenergies[0,:] print t.homos[0] print t.moenergies[0,t.homos[0]]
import os from cclib.parser import Jaguar os.chdir(os.path.join("..","data","Jaguar","basicJaguar")) files = [ ["eg01","dvb_gopt.out"], ["eg02","dvb_sp.out"], ["eg03","dvb_ir.out"], ["eg06","dvb_un_sp.out"] ] for f in files: t = Jaguar(os.path.join(f[0],f[1])) t.parse() if f[0]!="eg03": print t.scfvalues
Test the parsing of all of the uploaded Jaguar files
Test the parsing of all of the uploaded Jaguar files git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27
Python
lgpl-2.1
Clyde-fare/cclib_bak,Clyde-fare/cclib_bak
python
## Code Before: import os from cclib.parser import Jaguar os.chdir(os.path.join("..","data","Jaguar","basicJaguar")) os.chdir("eg01") for file in ["dvb_gopt.out"]: t = Jaguar(file) t.parse() print t.moenergies[0,:] print t.homos[0] print t.moenergies[0,t.homos[0]] ## Instruction: Test the parsing of all of the uploaded Jaguar files git-svn-id: d468cea6ffe92bc1eb1f3bde47ad7e70b065426a@75 5acbf244-8a03-4a8b-a19b-0d601add4d27 ## Code After: import os from cclib.parser import Jaguar os.chdir(os.path.join("..","data","Jaguar","basicJaguar")) files = [ ["eg01","dvb_gopt.out"], ["eg02","dvb_sp.out"], ["eg03","dvb_ir.out"], ["eg06","dvb_un_sp.out"] ] for f in files: t = Jaguar(os.path.join(f[0],f[1])) t.parse() if f[0]!="eg03": print t.scfvalues
... os.chdir(os.path.join("..","data","Jaguar","basicJaguar")) files = [ ["eg01","dvb_gopt.out"], ["eg02","dvb_sp.out"], ["eg03","dvb_ir.out"], ["eg06","dvb_un_sp.out"] ] for f in files: t = Jaguar(os.path.join(f[0],f[1])) t.parse() if f[0]!="eg03": print t.scfvalues ...
6a508d01fa3fa0d4084406fcb2b5e41d1b614b7c
datalogger/__main__.py
datalogger/__main__.py
import sys from PyQt5.QtWidgets import QApplication from datalogger.api.workspace import Workspace from datalogger.analysis_window import AnalysisWindow from datalogger import __version__ def run_datalogger_full(): print("CUED DataLogger {}".format(__version__)) app = 0 app = QApplication(sys.argv) # Create the window w = AnalysisWindow() w.CurrentWorkspace = Workspace() #w.CurrentWorkspace.path = "//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/" # Load the workspace #CurrentWorkspace.load("//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/tests/test_workspace.wsp") w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/") # Run the program w.show() sys.exit(app.exec_()) if __name__ == '__main__': run_datalogger_full()
import sys from PyQt5.QtWidgets import QApplication from datalogger.api.workspace import Workspace from datalogger.analysis_window import AnalysisWindow from datalogger import __version__ def run_datalogger_full(): print("CUED DataLogger {}".format(__version__)) app = 0 app = QApplication(sys.argv) CurrentWorkspace = Workspace() # Create the window w = AnalysisWindow() w.CurrentWorkspace = CurrentWorkspace w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/") # Run the program w.show() sys.exit(app.exec_()) if __name__ == '__main__': run_datalogger_full()
Move workspace before window creation so config set for window
Move workspace before window creation so config set for window
Python
bsd-3-clause
torebutlin/cued_datalogger
python
## Code Before: import sys from PyQt5.QtWidgets import QApplication from datalogger.api.workspace import Workspace from datalogger.analysis_window import AnalysisWindow from datalogger import __version__ def run_datalogger_full(): print("CUED DataLogger {}".format(__version__)) app = 0 app = QApplication(sys.argv) # Create the window w = AnalysisWindow() w.CurrentWorkspace = Workspace() #w.CurrentWorkspace.path = "//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/" # Load the workspace #CurrentWorkspace.load("//cued-fs/users/general/tab53/ts-home/Documents/urop/Logger 2017/cued_datalogger/tests/test_workspace.wsp") w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/") # Run the program w.show() sys.exit(app.exec_()) if __name__ == '__main__': run_datalogger_full() ## Instruction: Move workspace before window creation so config set for window ## Code After: import sys from PyQt5.QtWidgets import QApplication from datalogger.api.workspace import Workspace from datalogger.analysis_window import AnalysisWindow from datalogger import __version__ def run_datalogger_full(): print("CUED DataLogger {}".format(__version__)) app = 0 app = QApplication(sys.argv) CurrentWorkspace = Workspace() # Create the window w = AnalysisWindow() w.CurrentWorkspace = CurrentWorkspace w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/") # Run the program w.show() sys.exit(app.exec_()) if __name__ == '__main__': run_datalogger_full()
# ... existing code ... app = 0 app = QApplication(sys.argv) CurrentWorkspace = Workspace() # Create the window w = AnalysisWindow() w.CurrentWorkspace = CurrentWorkspace w.addon_widget.discover_addons(w.CurrentWorkspace.path + "addons/") # Run the program # ... rest of the code ...
44ac6ece920bb1602a053b31b78326a3f30be151
setup.py
setup.py
from setuptools import setup from guano import __version__ setup( name='guano', version=__version__, description='GUANO, the "Grand Unified" bat acoustics metadata format', long_description=open('README.md').read(), url='https://github.com/riggsd/guano-py', license='MIT', author='David A. Riggs', author_email='[email protected]', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', ], keywords='bats acoustics metadata', py_modules=['guano'], scripts=['bin/sb2guano.py'], )
from setuptools import setup from glob import glob from guano import __version__ setup( name='guano', version=__version__, description='GUANO, the "Grand Unified" bat acoustics metadata format', long_description=open('README.md').read(), url='https://github.com/riggsd/guano-py', license='MIT', author='David A. Riggs', author_email='[email protected]', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ], keywords='bats acoustics metadata guano', py_modules=['guano'], scripts=glob('bin/*.py'), )
Declare to the PyPI that we support Python 3
Declare to the PyPI that we support Python 3
Python
mit
riggsd/guano-py
python
## Code Before: from setuptools import setup from guano import __version__ setup( name='guano', version=__version__, description='GUANO, the "Grand Unified" bat acoustics metadata format', long_description=open('README.md').read(), url='https://github.com/riggsd/guano-py', license='MIT', author='David A. Riggs', author_email='[email protected]', classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', ], keywords='bats acoustics metadata', py_modules=['guano'], scripts=['bin/sb2guano.py'], ) ## Instruction: Declare to the PyPI that we support Python 3 ## Code After: from setuptools import setup from glob import glob from guano import __version__ setup( name='guano', version=__version__, description='GUANO, the "Grand Unified" bat acoustics metadata format', long_description=open('README.md').read(), url='https://github.com/riggsd/guano-py', license='MIT', author='David A. Riggs', author_email='[email protected]', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ], keywords='bats acoustics metadata guano', py_modules=['guano'], scripts=glob('bin/*.py'), )
... from setuptools import setup from glob import glob from guano import __version__ ... author='David A. Riggs', author_email='[email protected]', classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', ], keywords='bats acoustics metadata guano', py_modules=['guano'], scripts=glob('bin/*.py'), ) ...
96b592b00b9a353127ef9ee4c676d873e77edbfd
buildSrc/src/main/kotlin/Versions.kt
buildSrc/src/main/kotlin/Versions.kt
object Versions { const val LORITTA = "2021-SNAPSHOT" const val PUDDING = "0.0.2-20220412.131908-317" const val KOTLIN = "1.6.20-M1" const val KTOR = "1.6.7" const val JDA = "4.3.0_324" const val KOTLIN_SERIALIZATION = "1.3.2" const val KOTLIN_COROUTINES = "1.6.0" const val EXPOSED = "0.37.3" const val KOTLIN_LOGGING = "2.1.16" }
object Versions { const val LORITTA = "2021-SNAPSHOT" const val PUDDING = "0.0.2-last-version-using-kt-1621" const val KOTLIN = "1.6.20-M1" const val KTOR = "1.6.7" const val JDA = "4.3.0_324" const val KOTLIN_SERIALIZATION = "1.3.2" const val KOTLIN_COROUTINES = "1.6.0" const val EXPOSED = "0.37.3" const val KOTLIN_LOGGING = "2.1.16" }
Change Pudding version to the last version before we updated it to Kotlin 1.7.0
Change Pudding version to the last version before we updated it to Kotlin 1.7.0
Kotlin
agpl-3.0
LorittaBot/Loritta,LorittaBot/Loritta,LorittaBot/Loritta,LorittaBot/Loritta
kotlin
## Code Before: object Versions { const val LORITTA = "2021-SNAPSHOT" const val PUDDING = "0.0.2-20220412.131908-317" const val KOTLIN = "1.6.20-M1" const val KTOR = "1.6.7" const val JDA = "4.3.0_324" const val KOTLIN_SERIALIZATION = "1.3.2" const val KOTLIN_COROUTINES = "1.6.0" const val EXPOSED = "0.37.3" const val KOTLIN_LOGGING = "2.1.16" } ## Instruction: Change Pudding version to the last version before we updated it to Kotlin 1.7.0 ## Code After: object Versions { const val LORITTA = "2021-SNAPSHOT" const val PUDDING = "0.0.2-last-version-using-kt-1621" const val KOTLIN = "1.6.20-M1" const val KTOR = "1.6.7" const val JDA = "4.3.0_324" const val KOTLIN_SERIALIZATION = "1.3.2" const val KOTLIN_COROUTINES = "1.6.0" const val EXPOSED = "0.37.3" const val KOTLIN_LOGGING = "2.1.16" }
// ... existing code ... object Versions { const val LORITTA = "2021-SNAPSHOT" const val PUDDING = "0.0.2-last-version-using-kt-1621" const val KOTLIN = "1.6.20-M1" const val KTOR = "1.6.7" const val JDA = "4.3.0_324" // ... rest of the code ...
cf8b6685f8fcadb3f50999ee587c7316741808f9
kotlin/src/main/kotlin/2018/Lib05.kt
kotlin/src/main/kotlin/2018/Lib05.kt
package aoc.kt.y2018; /** * Day 5. */ /** Part 1 */ fun processPolymer1(input: String): String { val output = input.toCharArray() .forEachIndexed { i, c -> if (i != 0) { var reacting = true var range = 0..0 while (reacting) { var offset = 0 if (reactionOccurs(c, input.get(-1))) { } else { reacting = false } } } } return output.toString() } /** Part 2 */ fun processPolymer2(input: String): String { return "42" } fun reactionOccurs(char: Char, prev: Char): Boolean { return false }
package aoc.kt.y2018; /** * Day 5. */ /** Part 1 */ fun processPolymer1(input: String): String { var polymer = Pair(input, true) while (polymer.second) { polymer = react(polymer.first) } return polymer //.first.length .toString() } /** Part 2 */ fun processPolymer2(input: String): String { return "42" } fun react(input: String): Pair<String, Boolean> { var result = mutableListOf<Char>() var polymer = input.toMutableList() var reactionOccured = false while (polymer.next() != null) { polymer.dequeue()?.let { a -> if (polymer.next() != null) { polymer.dequeue()?.let { b -> if (a.equals(b, true)) { reactionOccured = true } else { result.push(a) polymer.enqueue(b) } } } } } val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n } return Pair(resultStr, reactionOccured) } fun <T> MutableList<T>.push(e: T) { this.add(e) } fun <T> MutableList<T>.dequeue(): T? { if (this.isNotEmpty()) { return this.removeAt(0) } else { return null } } fun <T> MutableList<T>.enqueue(e: T) { this.add(0, e) } fun <T> MutableList<T>.next(): T? { return this.getOrNull(0) }
Update with new approach to day 5
Update with new approach to day 5
Kotlin
mit
nathanjent/adventofcode-rust
kotlin
## Code Before: package aoc.kt.y2018; /** * Day 5. */ /** Part 1 */ fun processPolymer1(input: String): String { val output = input.toCharArray() .forEachIndexed { i, c -> if (i != 0) { var reacting = true var range = 0..0 while (reacting) { var offset = 0 if (reactionOccurs(c, input.get(-1))) { } else { reacting = false } } } } return output.toString() } /** Part 2 */ fun processPolymer2(input: String): String { return "42" } fun reactionOccurs(char: Char, prev: Char): Boolean { return false } ## Instruction: Update with new approach to day 5 ## Code After: package aoc.kt.y2018; /** * Day 5. */ /** Part 1 */ fun processPolymer1(input: String): String { var polymer = Pair(input, true) while (polymer.second) { polymer = react(polymer.first) } return polymer //.first.length .toString() } /** Part 2 */ fun processPolymer2(input: String): String { return "42" } fun react(input: String): Pair<String, Boolean> { var result = mutableListOf<Char>() var polymer = input.toMutableList() var reactionOccured = false while (polymer.next() != null) { polymer.dequeue()?.let { a -> if (polymer.next() != null) { polymer.dequeue()?.let { b -> if (a.equals(b, true)) { reactionOccured = true } else { result.push(a) polymer.enqueue(b) } } } } } val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n } return Pair(resultStr, reactionOccured) } fun <T> MutableList<T>.push(e: T) { this.add(e) } fun <T> MutableList<T>.dequeue(): T? { if (this.isNotEmpty()) { return this.removeAt(0) } else { return null } } fun <T> MutableList<T>.enqueue(e: T) { this.add(0, e) } fun <T> MutableList<T>.next(): T? { return this.getOrNull(0) }
// ... existing code ... /** Part 1 */ fun processPolymer1(input: String): String { var polymer = Pair(input, true) while (polymer.second) { polymer = react(polymer.first) } return polymer //.first.length .toString() } /** Part 2 */ // ... modified code ... return "42" } fun react(input: String): Pair<String, Boolean> { var result = mutableListOf<Char>() var polymer = input.toMutableList() var reactionOccured = false while (polymer.next() != null) { polymer.dequeue()?.let { a -> if (polymer.next() != null) { polymer.dequeue()?.let { b -> if (a.equals(b, true)) { reactionOccured = true } else { result.push(a) polymer.enqueue(b) } } } } } val resultStr: String = result.map { it.toString() }.reduce { acc, n -> acc + n } return Pair(resultStr, reactionOccured) } fun <T> MutableList<T>.push(e: T) { this.add(e) } fun <T> MutableList<T>.dequeue(): T? { if (this.isNotEmpty()) { return this.removeAt(0) } else { return null } } fun <T> MutableList<T>.enqueue(e: T) { this.add(0, e) } fun <T> MutableList<T>.next(): T? { return this.getOrNull(0) } // ... rest of the code ...
1f9bc1b6f9a796458d104c01b9a344cbb0c84a9b
Lib/fontParts/fontshell/groups.py
Lib/fontParts/fontshell/groups.py
import defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = value def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del self.naked()[key]
import defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_base_side1KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide1Groups") def _get_base_side2KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide2Groups") def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = value def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del self.naked()[key]
Add defcon implementation of group lookup methods.
Add defcon implementation of group lookup methods.
Python
mit
robofab-developers/fontParts,robofab-developers/fontParts
python
## Code Before: import defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = value def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del self.naked()[key] ## Instruction: Add defcon implementation of group lookup methods. ## Code After: import defcon from fontParts.base import BaseGroups from fontParts.fontshell.base import RBaseObject class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_base_side1KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide1Groups") def _get_base_side2KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide2Groups") def _items(self): return self.naked().items() def _contains(self, key): return key in self.naked() def _setItem(self, key, value): self.naked()[key] = value def _getItem(self, key): return self.naked()[key] def _delItem(self, key): del self.naked()[key]
# ... existing code ... class RGroups(RBaseObject, BaseGroups): wrapClass = defcon.Groups def _get_base_side1KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide1Groups") def _get_base_side2KerningGroups(self): return self.naked().getRepresentation("defcon.groups.kerningSide2Groups") def _items(self): return self.naked().items() # ... rest of the code ...
3c0d52aa0a936b3ae138ddfba66e7ba9dcc5f934
sympy/plotting/proxy_pyglet.py
sympy/plotting/proxy_pyglet.py
from warnings import warn from sympy.core.compatibility import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
Change the import location of DeprecationWarning used by plotting module
Change the import location of DeprecationWarning used by plotting module The SympyDeprecationWarning was moved from its original location. The change was done in the master branch. The same change must be mirrored in this development branch.
Python
bsd-3-clause
pbrady/sympy,grevutiu-gabriel/sympy,rahuldan/sympy,atsao72/sympy,kmacinnis/sympy,yashsharan/sympy,drufat/sympy,iamutkarshtiwari/sympy,shikil/sympy,atsao72/sympy,jaimahajan1997/sympy,meghana1995/sympy,jerli/sympy,oliverlee/sympy,ahhda/sympy,garvitr/sympy,sahmed95/sympy,abloomston/sympy,kaushik94/sympy,jbbskinny/sympy,cswiercz/sympy,postvakje/sympy,maniteja123/sympy,asm666/sympy,dqnykamp/sympy,jamesblunt/sympy,Curious72/sympy,diofant/diofant,Titan-C/sympy,kaushik94/sympy,saurabhjn76/sympy,amitjamadagni/sympy,atreyv/sympy,kevalds51/sympy,abloomston/sympy,saurabhjn76/sympy,sampadsaha5/sympy,abhiii5459/sympy,chaffra/sympy,MechCoder/sympy,MridulS/sympy,hargup/sympy,aktech/sympy,abhiii5459/sympy,vipulroxx/sympy,amitjamadagni/sympy,pandeyadarsh/sympy,Vishluck/sympy,sunny94/temp,emon10005/sympy,kaichogami/sympy,MridulS/sympy,AunShiLord/sympy,pbrady/sympy,Curious72/sympy,kevalds51/sympy,wyom/sympy,Davidjohnwilson/sympy,wanglongqi/sympy,jamesblunt/sympy,liangjiaxing/sympy,yashsharan/sympy,MechCoder/sympy,moble/sympy,atreyv/sympy,meghana1995/sympy,Arafatk/sympy,lindsayad/sympy,pbrady/sympy,aktech/sympy,mafiya69/sympy,jbbskinny/sympy,asm666/sympy,ga7g08/sympy,ga7g08/sympy,cswiercz/sympy,kevalds51/sympy,sahmed95/sympy,Sumith1896/sympy,souravsingh/sympy,skirpichev/omg,drufat/sympy,sunny94/temp,jerli/sympy,toolforger/sympy,mafiya69/sympy,cccfran/sympy,MridulS/sympy,chaffra/sympy,yukoba/sympy,AunShiLord/sympy,iamutkarshtiwari/sympy,ChristinaZografou/sympy,toolforger/sympy,lindsayad/sympy,farhaanbukhsh/sympy,debugger22/sympy,kumarkrishna/sympy,pandeyadarsh/sympy,ga7g08/sympy,hargup/sympy,saurabhjn76/sympy,atreyv/sympy,Curious72/sympy,lidavidm/sympy,ahhda/sympy,sahilshekhawat/sympy,dqnykamp/sympy,liangjiaxing/sympy,chaffra/sympy,kaushik94/sympy,MechCoder/sympy,Designist/sympy,jbbskinny/sympy,garvitr/sympy,madan96/sympy,beni55/sympy,VaibhavAgarwalVA/sympy,wanglongqi/sympy,skidzo/sympy,debugger22/sympy,cccfran/sympy,hargup/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,ahhda/sympy,postvakje/sympy,vipulroxx/sympy,dqnykamp/sympy,Sumith1896/sympy,bukzor/sympy,mafiya69/sympy,sunny94/temp,shikil/sympy,mcdaniel67/sympy,grevutiu-gabriel/sympy,Shaswat27/sympy,shikil/sympy,sampadsaha5/sympy,Davidjohnwilson/sympy,abloomston/sympy,skidzo/sympy,Shaswat27/sympy,Gadal/sympy,emon10005/sympy,lidavidm/sympy,hrashk/sympy,beni55/sympy,kumarkrishna/sympy,wyom/sympy,souravsingh/sympy,oliverlee/sympy,wanglongqi/sympy,sahilshekhawat/sympy,kmacinnis/sympy,bukzor/sympy,ChristinaZografou/sympy,aktech/sympy,maniteja123/sympy,moble/sympy,hrashk/sympy,cccfran/sympy,lindsayad/sympy,yukoba/sympy,emon10005/sympy,Titan-C/sympy,lidavidm/sympy,shipci/sympy,vipulroxx/sympy,bukzor/sympy,moble/sympy,yashsharan/sympy,Mitchkoens/sympy,madan96/sympy,Designist/sympy,toolforger/sympy,Gadal/sympy,Vishluck/sympy,Mitchkoens/sympy,meghana1995/sympy,ChristinaZografou/sympy,madan96/sympy,sahmed95/sympy,sampadsaha5/sympy,AunShiLord/sympy,srjoglekar246/sympy,cswiercz/sympy,rahuldan/sympy,skidzo/sympy,jaimahajan1997/sympy,kaichogami/sympy,flacjacket/sympy,jaimahajan1997/sympy,Shaswat27/sympy,abhiii5459/sympy,Mitchkoens/sympy,iamutkarshtiwari/sympy,kumarkrishna/sympy,AkademieOlympia/sympy,asm666/sympy,pandeyadarsh/sympy,postvakje/sympy,jerli/sympy,AkademieOlympia/sympy,shipci/sympy,Titan-C/sympy,shipci/sympy,farhaanbukhsh/sympy,atsao72/sympy,debugger22/sympy,kmacinnis/sympy,grevutiu-gabriel/sympy,yukoba/sympy,mcdaniel67/sympy,Arafatk/sympy,maniteja123/sympy,garvitr/sympy,sahilshekhawat/sympy,liangjiaxing/sympy,drufat/sympy,Davidjohnwilson/sympy,beni55/sympy,D
esignist/sympy,kaichogami/sympy,souravsingh/sympy,Vishluck/sympy,hrashk/sympy,farhaanbukhsh/sympy,oliverlee/sympy,wyom/sympy,rahuldan/sympy,jamesblunt/sympy,Sumith1896/sympy,Arafatk/sympy,VaibhavAgarwalVA/sympy,mcdaniel67/sympy,Gadal/sympy
python
## Code Before: from warnings import warn from sympy.core.compatibility import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs) ## Instruction: Change the import location of DeprecationWarning used by plotting module The SympyDeprecationWarning was moved from its original location. The change was done in the master branch. The same change must be mirrored in this development branch. ## Code After: from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): """ A temporary proxy for an interface under deprecation. This proxy is the one imported by `from sympy import *`. The Plot class will change in future versions of sympy to use the new plotting module. That new plotting module is already used by the plot() function (lowercase). To write code compatible with future versions of sympy use that function (plot() lowercase). Or if you want to use the old plotting module just import it directly: `from sympy.plotting.pygletplot import PygletPlot` To use Plot from the new plotting module do: `from sympy.plotting.plot import Plot` In future version of sympy you will also be able to use `from sympy.plotting import Plot` but in the current version this will import this proxy object. It's done for backward compatibility. The old plotting module is not deprecated. Only the location will change. The new location is sympy.plotting.pygletplot. """ warn('This interface will change in future versions of sympy.' ' As a precatuion use the plot() function (lowercase).' ' See the docstring for details.', SymPyDeprecationWarning) return PygletPlot(*args, **kwargs)
// ... existing code ... from warnings import warn from sympy.utilities.exceptions import SymPyDeprecationWarning from pygletplot import PygletPlot def Plot(*args, **kwargs): // ... rest of the code ...
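The change in this record is only the import location of SymPyDeprecationWarning; the surrounding pattern is a thin proxy that warns and then delegates to the real implementation. Below is a generic Python sketch of that pattern. OldPlot, NewPlot and MyDeprecationWarning are made-up names for illustration, not sympy APIs.

```python
import warnings

class MyDeprecationWarning(DeprecationWarning):
    """Stand-in for a project-specific deprecation warning class."""

def NewPlot(*args, **kwargs):
    # The replacement implementation the proxy forwards to.
    return ("plotted", args, kwargs)

def OldPlot(*args, **kwargs):
    # Thin proxy kept for backward compatibility: warn, then delegate.
    warnings.warn("OldPlot will change in a future release; use NewPlot instead.",
                  MyDeprecationWarning, stacklevel=2)
    return NewPlot(*args, **kwargs)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    OldPlot(1, scale=2)
    print(caught[0].category.__name__)   # MyDeprecationWarning
```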
b0efb7db50080dd1e9e96ad8d818e3b0859bbca3
retry/__init__.py
retry/__init__.py
from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
Add a usage in retry
Add a usage in retry
Python
mit
soasme/retries
python
## Code Before: from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _ ## Instruction: Add a usage in retry ## Code After: from functools import wraps import time class RetryExceededError(Exception): pass class retry(object): '''A decorator encapsulated retry logic. Usage: @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): self.errors = errors self.tries = tries self.delay = delay def __call__(self, func): @wraps(func) def _(*args, **kw): retry_left_count = self.tries while retry_left_count: try: return func(*args, **kw) except Exception, e: retry_left_count -= 1 if not isinstance(e, self.errors): raise e if not retry_left_count: raise RetryExceededError if self.delay: time.sleep(self.delay) return _
# ... existing code ... Usage: @retry(errors=(TTransportException, AnyExpectedError)) @retry() # detect whatsoever errors and retry 3 times ''' def __init__(self, errors=(Exception, ), tries=3, delay=0): # ... rest of the code ...
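The commit only extends the docstring with a second usage line: @retry() with its defaults retries any exception three times. As a concrete illustration of that usage, here is a small Python 3 re-sketch of the decorator with the same parameters (errors, tries, delay) plus a call that succeeds on the third attempt. It is written function-style rather than class-style and is not the soasme/retries code itself.

```python
import time
from functools import wraps

class RetryExceededError(Exception):
    pass

def retry(errors=(Exception,), tries=3, delay=0):
    """Function-style equivalent of the class-based decorator in the record."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kw):
            remaining = tries
            while remaining:
                try:
                    return func(*args, **kw)
                except errors:
                    remaining -= 1
                    if not remaining:
                        raise RetryExceededError
                    if delay:
                        time.sleep(delay)
        return wrapper
    return decorator

calls = {"count": 0}

@retry()   # the usage the new docstring line documents: retry any error, 3 tries
def flaky():
    calls["count"] += 1
    if calls["count"] < 3:
        raise ValueError("transient failure")
    return "ok"

print(flaky(), "after", calls["count"], "attempts")   # ok after 3 attempts
```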
115197d42b380ae65de75d74a4d28933eb8defde
testproj/testproj/testapp/models.py
testproj/testproj/testapp/models.py
from django.db import models from django.utils import timezone class SecretFile(models.Model): filename = models.CharField(max_length=255, blank=True, null=True) order = models.IntegerField(blank=True, null=True) size = models.PositiveIntegerField(blank=True, null=True) created_on = models.DateTimeField(default=timezone.now) is_secret = models.BooleanField() def __unicode__(self): return "#%d %s" % (self.pk, self.filename)
from django.db import models from django.utils import timezone class SecretFile(models.Model): filename = models.CharField(max_length=255, blank=True, null=True) order = models.IntegerField(blank=True, null=True) size = models.PositiveIntegerField(blank=True, null=True) created_on = models.DateTimeField(default=timezone.now) is_secret = models.BooleanField(default=False) def __unicode__(self): return "#%d %s" % (self.pk, self.filename)
Fix warning about default value for boolean field
Fix warning about default value for boolean field
Python
bsd-3-clause
artscoop/webstack-django-sorting,artscoop/webstack-django-sorting
python
## Code Before: from django.db import models from django.utils import timezone class SecretFile(models.Model): filename = models.CharField(max_length=255, blank=True, null=True) order = models.IntegerField(blank=True, null=True) size = models.PositiveIntegerField(blank=True, null=True) created_on = models.DateTimeField(default=timezone.now) is_secret = models.BooleanField() def __unicode__(self): return "#%d %s" % (self.pk, self.filename) ## Instruction: Fix warning about default value for boolean field ## Code After: from django.db import models from django.utils import timezone class SecretFile(models.Model): filename = models.CharField(max_length=255, blank=True, null=True) order = models.IntegerField(blank=True, null=True) size = models.PositiveIntegerField(blank=True, null=True) created_on = models.DateTimeField(default=timezone.now) is_secret = models.BooleanField(default=False) def __unicode__(self): return "#%d %s" % (self.pk, self.filename)
// ... existing code ... order = models.IntegerField(blank=True, null=True) size = models.PositiveIntegerField(blank=True, null=True) created_on = models.DateTimeField(default=timezone.now) is_secret = models.BooleanField(default=False) def __unicode__(self): return "#%d %s" % (self.pk, self.filename) // ... rest of the code ...
3372bade0c5aee8c30c507832c842d6533608f61
porunga/tests/test_main.py
porunga/tests/test_main.py
import unittest from porunga import get_manager from porunga.commands.test import PorungaTestCommand class TestManager(unittest.TestCase): def test_manager_has_proper_commands(self): manager = get_manager() commands = manager.get_commands() self.assertIn('test', commands) test_command = commands['test'] self.assertIsInstance(test_command, PorungaTestCommand)
import unittest from porunga import get_manager from porunga.commands.test import PorungaTestCommand class TestManager(unittest.TestCase): def test_manager_has_proper_commands(self): manager = get_manager() commands = manager.get_commands() self.assertTrue('test' in commands) test_command = commands['test'] self.assertTrue(isinstance(test_command, PorungaTestCommand))
Test updated to work with Python 2.6
Test updated to work with Python 2.6
Python
bsd-2-clause
lukaszb/porunga,lukaszb/porunga
python
## Code Before: import unittest from porunga import get_manager from porunga.commands.test import PorungaTestCommand class TestManager(unittest.TestCase): def test_manager_has_proper_commands(self): manager = get_manager() commands = manager.get_commands() self.assertIn('test', commands) test_command = commands['test'] self.assertIsInstance(test_command, PorungaTestCommand) ## Instruction: Test updated to work with Python 2.6 ## Code After: import unittest from porunga import get_manager from porunga.commands.test import PorungaTestCommand class TestManager(unittest.TestCase): def test_manager_has_proper_commands(self): manager = get_manager() commands = manager.get_commands() self.assertTrue('test' in commands) test_command = commands['test'] self.assertTrue(isinstance(test_command, PorungaTestCommand))
// ... existing code ... manager = get_manager() commands = manager.get_commands() self.assertTrue('test' in commands) test_command = commands['test'] self.assertTrue(isinstance(test_command, PorungaTestCommand)) // ... rest of the code ...
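The record swaps assertIn and assertIsInstance for assertTrue because the richer helpers only arrived in unittest with Python 2.7. The snippet below just puts the two spellings side by side in a runnable test; the names used are generic and not tied to porunga.

```python
import unittest

class CompatibilityStyles(unittest.TestCase):
    def test_membership_and_type(self):
        commands = {"test": object()}
        # Helpers available from Python 2.7 / 3.x onwards:
        self.assertIn("test", commands)
        self.assertIsInstance(commands["test"], object)
        # Python 2.6-compatible spellings, as used in the record:
        self.assertTrue("test" in commands)
        self.assertTrue(isinstance(commands["test"], object))

if __name__ == "__main__":
    unittest.main()
```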
34f83765d850fbc97cc3512eac4c2ebab551b5f7
db_logger.py
db_logger.py
import mysql.connector import config import threading enabled = False db_lock = threading.Lock() conn = mysql.connector.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database')) cur = conn.cursor() def log(message, kind): if enabled: with db_lock: cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW())', (message, kind)) conn.commit()
import mysql.connector import config import threading enabled = False connected = False db_lock = threading.Lock() def log(message, kind): if enabled: with db_lock: global conn, cur, connected if not connected: conn = mysql.connector.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database')) cur = conn.cursor() connected = True cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW())', (message, kind)) conn.commit()
Connect to MySQL only when needed
Connect to MySQL only when needed
Python
mit
kalinochkind/vkbot,kalinochkind/vkbot,kalinochkind/vkbot
python
## Code Before: import mysql.connector import config import threading enabled = False db_lock = threading.Lock() conn = mysql.connector.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database')) cur = conn.cursor() def log(message, kind): if enabled: with db_lock: cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW())', (message, kind)) conn.commit() ## Instruction: Connect to MySQL only when needed ## Code After: import mysql.connector import config import threading enabled = False connected = False db_lock = threading.Lock() def log(message, kind): if enabled: with db_lock: global conn, cur, connected if not connected: conn = mysql.connector.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database')) cur = conn.cursor() connected = True cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW())', (message, kind)) conn.commit()
// ... existing code ... import threading enabled = False connected = False db_lock = threading.Lock() def log(message, kind): if enabled: with db_lock: global conn, cur, connected if not connected: conn = mysql.connector.connect(host=config.get('db_logger.host'), user=config.get('db_logger.username'), password=config.get('db_logger.password'), database=config.get('db_logger.database')) cur = conn.cursor() connected = True cur.execute('INSERT INTO vkbot_logmessage VALUES (NULL, %s, %s, NOW())', (message, kind)) conn.commit() // ... rest of the code ...
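The point of this patch is that the module no longer opens a database connection at import time; the first log() call does, under the lock. The sketch below shows the same lazy, lock-guarded initialisation shape using an in-memory sqlite3 database so it runs without a MySQL server; the table and column names are invented for the example.

```python
import sqlite3
import threading

enabled = True
connected = False
db_lock = threading.Lock()
conn = None
cur = None

def log(message, kind):
    global conn, cur, connected
    if not enabled:
        return
    with db_lock:
        if not connected:   # the first call pays the connection cost, not import time
            conn = sqlite3.connect(":memory:", check_same_thread=False)
            cur = conn.cursor()
            cur.execute("CREATE TABLE logmessage (message TEXT, kind TEXT)")
            connected = True
        cur.execute("INSERT INTO logmessage VALUES (?, ?)", (message, kind))
        conn.commit()

log("hello", "info")
log("world", "debug")
print(cur.execute("SELECT COUNT(*) FROM logmessage").fetchone()[0])   # 2
```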
aa25fc4ef87ec1e3ea13aae2b746a518223c407d
src/test/java/de/innoaccel/wamp/server/converter/GeneralMessageTests.java
src/test/java/de/innoaccel/wamp/server/converter/GeneralMessageTests.java
package de.innoaccel.wamp.server.converter; import de.innoaccel.wamp.server.Websocket; import de.innoaccel.wamp.server.message.Message; import org.junit.Test; import java.io.IOException; abstract public class GeneralMessageTests<T extends Message> { protected JsonParsingConverter<T> converter; @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageIsNoArray(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("message", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenThereIsNoFirstField(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenFirstFieldIsNoNumber(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[null]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenFirstFieldIsNoInteger(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[0.5f]", socket); } @Test(expected = InvalidMessageCodeException.class) public void deserializeThrowsInvalidMessageCodeExceptionWhenWrongMessageCode(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[" + Message.INVALID + "]", socket); } }
package de.innoaccel.wamp.server.converter; import de.innoaccel.wamp.server.Websocket; import de.innoaccel.wamp.server.message.Message; import org.junit.Test; import java.io.IOException; abstract public class GeneralMessageTests<T extends Message> { protected JsonParsingConverter<T> converter; @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageIsNoArray(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("message", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenThereIsNoMessageCodeField(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoNumber(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[null]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoInteger(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[0.5f]", socket); } @Test(expected = InvalidMessageCodeException.class) public void deserializeThrowsInvalidMessageCodeExceptionWhenWrongMessageCode(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[" + Message.INVALID + "]", socket); } }
Rename tests to better reflect field meaning
Rename tests to better reflect field meaning
Java
bsd-2-clause
fritz-gerneth/java-wamp-server
java
## Code Before: package de.innoaccel.wamp.server.converter; import de.innoaccel.wamp.server.Websocket; import de.innoaccel.wamp.server.message.Message; import org.junit.Test; import java.io.IOException; abstract public class GeneralMessageTests<T extends Message> { protected JsonParsingConverter<T> converter; @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageIsNoArray(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("message", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenThereIsNoFirstField(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenFirstFieldIsNoNumber(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[null]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenFirstFieldIsNoInteger(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[0.5f]", socket); } @Test(expected = InvalidMessageCodeException.class) public void deserializeThrowsInvalidMessageCodeExceptionWhenWrongMessageCode(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[" + Message.INVALID + "]", socket); } } ## Instruction: Rename tests to better reflect field meaning ## Code After: package de.innoaccel.wamp.server.converter; import de.innoaccel.wamp.server.Websocket; import de.innoaccel.wamp.server.message.Message; import org.junit.Test; import java.io.IOException; abstract public class GeneralMessageTests<T extends Message> { protected JsonParsingConverter<T> converter; @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageIsNoArray(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("message", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenThereIsNoMessageCodeField(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoNumber(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[null]", socket); } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoInteger(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[0.5f]", socket); } @Test(expected = InvalidMessageCodeException.class) public void deserializeThrowsInvalidMessageCodeExceptionWhenWrongMessageCode(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[" + Message.INVALID + "]", socket); } }
# ... existing code ... } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenThereIsNoMessageCodeField(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[]", socket); # ... modified code ... } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoNumber(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[null]", socket); ... } @Test(expected = MessageParseException.class) public void deserializeThrowsMessageParseExceptionWhenMessageCodeIsNoInteger(final Websocket socket) throws IOException, MessageParseException, InvalidMessageCodeException { this.converter.deserialize("[0.5f]", socket); # ... rest of the code ...
a580ca90a9434aa16e6e8c14552907c2cf405341
codechicken/lib/lighting/PlanarLightModel.java
codechicken/lib/lighting/PlanarLightModel.java
package codechicken.lib.lighting; import net.minecraft.client.renderer.Tessellator; import codechicken.lib.colour.Colour; import codechicken.lib.colour.ColourRGBA; import codechicken.lib.render.CCModel; import codechicken.lib.render.CCRenderState; import codechicken.lib.render.IVertexModifier; import codechicken.lib.render.UV; import codechicken.lib.vec.Vector3; /** * Faster precomputed version of LightModel that only works for axis planar sides */ public class PlanarLightModel implements IVertexModifier { public ColourRGBA[] colours; public PlanarLightModel(int[] colours) { this.colours = new ColourRGBA[6]; for(int i = 0; i < 6; i++) this.colours[i] = new ColourRGBA(colours[i]); } @Override public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i) { ColourRGBA light = colours[CCModel.findSide(normal)]; int colour = (m == null || m.colours == null) ? -1 : m.colours[i]; Colour res = new ColourRGBA(colour).multiply(light); CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ; } @Override public boolean needsNormals() { return true; } }
package codechicken.lib.lighting; import net.minecraft.client.renderer.Tessellator; import codechicken.lib.colour.Colour; import codechicken.lib.colour.ColourRGBA; import codechicken.lib.render.CCModel; import codechicken.lib.render.CCRenderState; import codechicken.lib.render.IVertexModifier; import codechicken.lib.render.UV; import codechicken.lib.vec.Vector3; /** * Faster precomputed version of LightModel that only works for axis planar sides */ public class PlanarLightModel implements IVertexModifier { public static PlanarLightModel standardLightModel = LightModel.standardLightModel.reducePlanar(); public ColourRGBA[] colours; public PlanarLightModel(int[] colours) { this.colours = new ColourRGBA[6]; for(int i = 0; i < 6; i++) this.colours[i] = new ColourRGBA(colours[i]); } @Override public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i) { ColourRGBA light = colours[CCModel.findSide(normal)]; int colour = (m == null || m.colours == null) ? -1 : m.colours[i]; Colour res = new ColourRGBA(colour).multiply(light); CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ; } @Override public boolean needsNormals() { return true; } }
Add standard planar light model
Add standard planar light model
Java
lgpl-2.1
Chicken-Bones/CodeChickenLib,TheCBProject/CodeChickenLib,KJ4IPS/CodeChickenLib,alexbegt/CodeChickenLib
java
## Code Before: package codechicken.lib.lighting; import net.minecraft.client.renderer.Tessellator; import codechicken.lib.colour.Colour; import codechicken.lib.colour.ColourRGBA; import codechicken.lib.render.CCModel; import codechicken.lib.render.CCRenderState; import codechicken.lib.render.IVertexModifier; import codechicken.lib.render.UV; import codechicken.lib.vec.Vector3; /** * Faster precomputed version of LightModel that only works for axis planar sides */ public class PlanarLightModel implements IVertexModifier { public ColourRGBA[] colours; public PlanarLightModel(int[] colours) { this.colours = new ColourRGBA[6]; for(int i = 0; i < 6; i++) this.colours[i] = new ColourRGBA(colours[i]); } @Override public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i) { ColourRGBA light = colours[CCModel.findSide(normal)]; int colour = (m == null || m.colours == null) ? -1 : m.colours[i]; Colour res = new ColourRGBA(colour).multiply(light); CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ; } @Override public boolean needsNormals() { return true; } } ## Instruction: Add standard planar light model ## Code After: package codechicken.lib.lighting; import net.minecraft.client.renderer.Tessellator; import codechicken.lib.colour.Colour; import codechicken.lib.colour.ColourRGBA; import codechicken.lib.render.CCModel; import codechicken.lib.render.CCRenderState; import codechicken.lib.render.IVertexModifier; import codechicken.lib.render.UV; import codechicken.lib.vec.Vector3; /** * Faster precomputed version of LightModel that only works for axis planar sides */ public class PlanarLightModel implements IVertexModifier { public static PlanarLightModel standardLightModel = LightModel.standardLightModel.reducePlanar(); public ColourRGBA[] colours; public PlanarLightModel(int[] colours) { this.colours = new ColourRGBA[6]; for(int i = 0; i < 6; i++) this.colours[i] = new ColourRGBA(colours[i]); } @Override public void applyModifiers(CCModel m, Tessellator tess, Vector3 vec, UV uv, Vector3 normal, int i) { ColourRGBA light = colours[CCModel.findSide(normal)]; int colour = (m == null || m.colours == null) ? -1 : m.colours[i]; Colour res = new ColourRGBA(colour).multiply(light); CCRenderState.vertexColour(res.r&0xFF, res.g&0xFF, res.b&0xFF, res.a&0xFF) ; } @Override public boolean needsNormals() { return true; } }
# ... existing code ... */ public class PlanarLightModel implements IVertexModifier { public static PlanarLightModel standardLightModel = LightModel.standardLightModel.reducePlanar(); public ColourRGBA[] colours; public PlanarLightModel(int[] colours) # ... rest of the code ...
18e6f40dcd6cf675f26197d6beb8a3f3d9064b1e
app.py
app.py
import tornado.ioloop import tornado.web from tornado.websocket import WebSocketHandler from tornado import template class MainHandler(tornado.web.RequestHandler): DEMO_TURN = { 'player_id': 'abc', 'player_turn': 1, 'card': { 'id': 'card_1', 'name': 'Card Name', 'image': None, 'description': 'This is a card', 'attributes': { 'power': 9001, 'strength': 100, 'speed': 50, 'agility': 20, 'smell': 4 } } } def get(self): self.write(application.template_loader.load("index.html").generate(turn=self.DEMO_TURN)) class SocketHandler(WebSocketHandler): def open(self): print("WebSocket opened") def on_message(self, message): self.write_message(u"You said: " + message) def on_close(self): print("WebSocket closed") application = tornado.web.Application([ (r"/", MainHandler), (r"/sockets", SocketHandler), (r"/content/(.*)", tornado.web.StaticFileHandler, {"path": "static"}) #(r"/", MainHandler), ]) if __name__ == "__main__": application.listen(8888) application.template_loader = template.Loader("templates") tornado.ioloop.IOLoop.current().start()
import json import tornado.ioloop import tornado.web from tornado.websocket import WebSocketHandler from tornado import template class MainHandler(tornado.web.RequestHandler): DEMO_TURN = { 'player_id': 'abc', 'player_turn': 1, 'card': { 'id': 'card_1', 'name': 'Card Name', 'image': None, 'description': 'This is a card', 'attributes': { 'power': 9001, 'strength': 100, 'speed': 50, 'agility': 20, 'smell': 4 } } } def get(self): self.write(application.template_loader.load("index.html").generate(turn=self.DEMO_TURN)) class SocketHandler(WebSocketHandler): def open(self): print("WebSocket opened") def on_message(self, message): self.write_message(json.dumps(self.DEMO_TURN)) def on_close(self): print("WebSocket closed") application = tornado.web.Application([ (r"/", MainHandler), (r"/sockets", SocketHandler), (r"/content/(.*)", tornado.web.StaticFileHandler, {"path": "static"}) #(r"/", MainHandler), ]) if __name__ == "__main__": application.listen(8888) application.template_loader = template.Loader("templates") tornado.ioloop.IOLoop.current().start()
Send demo turn over websocket.
Send demo turn over websocket.
Python
apache-2.0
ohmygourd/dewbrick,ohmygourd/dewbrick,ohmygourd/dewbrick
python
## Code Before: import tornado.ioloop import tornado.web from tornado.websocket import WebSocketHandler from tornado import template class MainHandler(tornado.web.RequestHandler): DEMO_TURN = { 'player_id': 'abc', 'player_turn': 1, 'card': { 'id': 'card_1', 'name': 'Card Name', 'image': None, 'description': 'This is a card', 'attributes': { 'power': 9001, 'strength': 100, 'speed': 50, 'agility': 20, 'smell': 4 } } } def get(self): self.write(application.template_loader.load("index.html").generate(turn=self.DEMO_TURN)) class SocketHandler(WebSocketHandler): def open(self): print("WebSocket opened") def on_message(self, message): self.write_message(u"You said: " + message) def on_close(self): print("WebSocket closed") application = tornado.web.Application([ (r"/", MainHandler), (r"/sockets", SocketHandler), (r"/content/(.*)", tornado.web.StaticFileHandler, {"path": "static"}) #(r"/", MainHandler), ]) if __name__ == "__main__": application.listen(8888) application.template_loader = template.Loader("templates") tornado.ioloop.IOLoop.current().start() ## Instruction: Send demo turn over websocket. ## Code After: import json import tornado.ioloop import tornado.web from tornado.websocket import WebSocketHandler from tornado import template class MainHandler(tornado.web.RequestHandler): DEMO_TURN = { 'player_id': 'abc', 'player_turn': 1, 'card': { 'id': 'card_1', 'name': 'Card Name', 'image': None, 'description': 'This is a card', 'attributes': { 'power': 9001, 'strength': 100, 'speed': 50, 'agility': 20, 'smell': 4 } } } def get(self): self.write(application.template_loader.load("index.html").generate(turn=self.DEMO_TURN)) class SocketHandler(WebSocketHandler): def open(self): print("WebSocket opened") def on_message(self, message): self.write_message(json.dumps(self.DEMO_TURN)) def on_close(self): print("WebSocket closed") application = tornado.web.Application([ (r"/", MainHandler), (r"/sockets", SocketHandler), (r"/content/(.*)", tornado.web.StaticFileHandler, {"path": "static"}) #(r"/", MainHandler), ]) if __name__ == "__main__": application.listen(8888) application.template_loader = template.Loader("templates") tornado.ioloop.IOLoop.current().start()
... import json import tornado.ioloop import tornado.web from tornado.websocket import WebSocketHandler ... print("WebSocket opened") def on_message(self, message): self.write_message(json.dumps(self.DEMO_TURN)) def on_close(self): print("WebSocket closed") ...
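After the patch, on_message replies with the serialized demo turn instead of echoing the client's text. The standalone snippet below only demonstrates that this serialization round-trips cleanly; running a real tornado server and client is out of scope here. Note that in the record DEMO_TURN is defined on MainHandler, so SocketHandler's self.DEMO_TURN would additionally need the constant moved or shared for the handler to work as written.

```python
import json

DEMO_TURN = {
    "player_id": "abc",
    "player_turn": 1,
    "card": {
        "id": "card_1",
        "name": "Card Name",
        "image": None,
        "description": "This is a card",
        "attributes": {"power": 9001, "strength": 100, "speed": 50,
                       "agility": 20, "smell": 4},
    },
}

payload = json.dumps(DEMO_TURN)            # what write_message() is handed
assert json.loads(payload) == DEMO_TURN    # the client can recover the structure
print(payload[:60] + "...")
```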
e2495040277fafdac4c0e060517cf667baa27c02
chinup/__init__.py
chinup/__init__.py
try: from .allauth import * except ImportError: from .chinup import * from .exceptions import * __version__ = '0.1'
from __future__ import absolute_import, unicode_literals try: from .allauth import * except ImportError: from .chinup import * from .exceptions import * __version__ = '0.1' # Configure logging to avoid warning. # https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library import logging if hasattr(logging, 'NullHandler'): logging.getLogger('chinup').addHandler(logging.NullHandler())
Configure package-level logging to avoid warning.
Configure package-level logging to avoid warning.
Python
mit
pagepart/chinup
python
## Code Before: try: from .allauth import * except ImportError: from .chinup import * from .exceptions import * __version__ = '0.1' ## Instruction: Configure package-level logging to avoid warning. ## Code After: from __future__ import absolute_import, unicode_literals try: from .allauth import * except ImportError: from .chinup import * from .exceptions import * __version__ = '0.1' # Configure logging to avoid warning. # https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library import logging if hasattr(logging, 'NullHandler'): logging.getLogger('chinup').addHandler(logging.NullHandler())
# ... existing code ... from __future__ import absolute_import, unicode_literals try: from .allauth import * except ImportError: # ... modified code ... __version__ = '0.1' # Configure logging to avoid warning. # https://docs.python.org/2/howto/logging.html#configuring-logging-for-a-library import logging if hasattr(logging, 'NullHandler'): logging.getLogger('chinup').addHandler(logging.NullHandler()) # ... rest of the code ...
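The hasattr guard in the record exists because logging.NullHandler was added in Python 2.7; on any modern interpreter the idiom reduces to the snippet below. It shows why a library attaches a NullHandler to its own logger: importing applications see no "no handlers could be found" noise, while applications that do configure logging still receive the library's records.

```python
import logging

# Library side: attach a NullHandler to the package logger at import time.
logging.getLogger("chinup").addHandler(logging.NullHandler())

log = logging.getLogger("chinup")
log.debug("silent: the application has not configured any handlers yet")

# Application side: opting in to logging makes library records visible again.
logging.basicConfig(level=logging.DEBUG, format="%(name)s: %(message)s")
log.debug("now visible on stderr")
```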
0472c1cabdfdf0f8a193552dac3370ae93bbdaed
scripts/get_top_hashtags.py
scripts/get_top_hashtags.py
import json import sys from collections import Counter f = open(sys.argv[1], 'r') topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10 hashtags = [] for line in f: if line.startswith('{'): hashtags.extend(json.loads(line)['hashtags']) hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags]) for (hashtag, count) in hashtagCounter.most_common(topk): print hashtag, count
import json import sys from collections import Counter f = open(sys.argv[1], 'r') topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10 hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ]) for (hashtag, count) in hashtagCounter.most_common(topk): print hashtag, count
Use a more compact functional style for instantiating hashtagCounter
Use a more compact functional style for instantiating hashtagCounter
Python
mpl-2.0
aDataAlchemist/election-tweets
python
## Code Before: import json import sys from collections import Counter f = open(sys.argv[1], 'r') topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10 hashtags = [] for line in f: if line.startswith('{'): hashtags.extend(json.loads(line)['hashtags']) hashtagCounter = Counter([hashtag.lower() for hashtag in hashtags]) for (hashtag, count) in hashtagCounter.most_common(topk): print hashtag, count ## Instruction: Use a more compact functional style for instantiating hashtagCounter ## Code After: import json import sys from collections import Counter f = open(sys.argv[1], 'r') topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10 hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ]) for (hashtag, count) in hashtagCounter.most_common(topk): print hashtag, count
// ... existing code ... f = open(sys.argv[1], 'r') topk = int(sys.argv[2]) if len(sys.argv) > 2 and sys.argv[2].isdigit() else 10 hashtagCounter = Counter([hashtag.lower() for line in f if line.startswith('{') for hashtag in json.loads(line)['hashtags'] ]) for (hashtag, count) in hashtagCounter.most_common(topk): print hashtag, count // ... rest of the code ...
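A tiny self-contained version of the refactor above: the Counter is built directly from a comprehension over the input lines instead of accumulating an intermediate hashtags list first. The sample lines are invented for illustration.

```python
import json
from collections import Counter

lines = [
    '{"hashtags": ["Election", "vote"]}',
    'garbage line that does not start with a brace',
    '{"hashtags": ["VOTE", "turnout"]}',
]

hashtag_counter = Counter(
    hashtag.lower()
    for line in lines
    if line.startswith("{")
    for hashtag in json.loads(line)["hashtags"]
)

for hashtag, count in hashtag_counter.most_common(2):
    print(hashtag, count)   # "vote 2" first, then one of the singletons
```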
edcc01522b7276962731f87f2fd5ee015adc0b75
Converter/src/test/java/converter/XmlToJsonConverterTest.java
Converter/src/test/java/converter/XmlToJsonConverterTest.java
package converter; import converters.XmlToJsonConverter; import org.junit.Test; import static org.junit.Assert.assertTrue; public class XmlToJsonConverterTest { /** * Test conversion from xml to json. */ @Test public void convertXmlToJsonTest() { String xmlString = "<note>\n" + "<to>Tove</to>\n" + "<from>Jani</from>\n" + "<heading>Reminder</heading>\n" + "<body>Don't forget me this weekend!</body>\n" + "</note>"; String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString); String expectedJsonResult = "{\"note\":{\"heading\":\"Reminder\",\"from\":\"Jani\",\"to\":\"Tove\",\"body\":\"Don't forget me this weekend!\"}}"; assertTrue(expectedJsonResult.equals(expectedJsonResult)); } }
package converter; import processors.XmlToJsonConverter; import org.junit.Test; import static org.junit.Assert.assertTrue; public class XmlToJsonConverterTest { /** * Test conversion from xml to json. */ @Test public void convertXmlToJsonTest() { String xmlString = "<note>\n" + "<to>Tove</to>\n" + "<from>Jani</from>\n" + "<heading>Reminder</heading>\n" + "<body>Don't forget me this weekend!</body>\n" + "</note>"; String jsonString = null; try { jsonString = new XmlToJsonConverter().process(xmlString, null); } catch (Throwable throwable) { throwable.printStackTrace(); } String expectedJsonResult = "{\"note\":{\"heading\":\"Reminder\",\"from\":\"Jani\",\"to\":\"Tove\",\"body\":\"Don't forget me this weekend!\"}}"; assertTrue(expectedJsonResult.equals(expectedJsonResult)); } }
Update test for new interface.
Update test for new interface.
Java
apache-2.0
IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring,IrimieBogdan/DistributedMonitoring
java
## Code Before: package converter; import converters.XmlToJsonConverter; import org.junit.Test; import static org.junit.Assert.assertTrue; public class XmlToJsonConverterTest { /** * Test conversion from xml to json. */ @Test public void convertXmlToJsonTest() { String xmlString = "<note>\n" + "<to>Tove</to>\n" + "<from>Jani</from>\n" + "<heading>Reminder</heading>\n" + "<body>Don't forget me this weekend!</body>\n" + "</note>"; String jsonString = XmlToJsonConverter.convertXmlToJson(xmlString); String expectedJsonResult = "{\"note\":{\"heading\":\"Reminder\",\"from\":\"Jani\",\"to\":\"Tove\",\"body\":\"Don't forget me this weekend!\"}}"; assertTrue(expectedJsonResult.equals(expectedJsonResult)); } } ## Instruction: Update test for new interface. ## Code After: package converter; import processors.XmlToJsonConverter; import org.junit.Test; import static org.junit.Assert.assertTrue; public class XmlToJsonConverterTest { /** * Test conversion from xml to json. */ @Test public void convertXmlToJsonTest() { String xmlString = "<note>\n" + "<to>Tove</to>\n" + "<from>Jani</from>\n" + "<heading>Reminder</heading>\n" + "<body>Don't forget me this weekend!</body>\n" + "</note>"; String jsonString = null; try { jsonString = new XmlToJsonConverter().process(xmlString, null); } catch (Throwable throwable) { throwable.printStackTrace(); } String expectedJsonResult = "{\"note\":{\"heading\":\"Reminder\",\"from\":\"Jani\",\"to\":\"Tove\",\"body\":\"Don't forget me this weekend!\"}}"; assertTrue(expectedJsonResult.equals(expectedJsonResult)); } }
// ... existing code ... package converter; import processors.XmlToJsonConverter; import org.junit.Test; import static org.junit.Assert.assertTrue; // ... modified code ... "<heading>Reminder</heading>\n" + "<body>Don't forget me this weekend!</body>\n" + "</note>"; String jsonString = null; try { jsonString = new XmlToJsonConverter().process(xmlString, null); } catch (Throwable throwable) { throwable.printStackTrace(); } String expectedJsonResult = "{\"note\":{\"heading\":\"Reminder\",\"from\":\"Jani\",\"to\":\"Tove\",\"body\":\"Don't forget me this weekend!\"}}"; assertTrue(expectedJsonResult.equals(expectedJsonResult)); } // ... rest of the code ...
19faea809ec3ea8a9722b0e87bb028fd23c721a1
modlib.c
modlib.c
uint16_t MODBUSSwapEndian( uint16_t Data ) { //Change big-endian to little-endian and vice versa unsigned char Swap; //Create 2 bytes long union union Conversion { uint16_t Data; unsigned char Bytes[2]; } Conversion; //Swap bytes Conversion.Data = Data; Swap = Conversion.Bytes[0]; Conversion.Bytes[0] = Conversion.Bytes[1]; Conversion.Bytes[1] = Swap; return Conversion.Data; } uint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length ) { //Calculate CRC16 checksum using given data and length uint16_t CRC = 0xFFFF; uint16_t i; unsigned char j; for ( i = 0; i < Length; i++ ) { CRC ^= Data[i]; //XOR current data byte with CRC value for ( j = 8; j != 0; j-- ) { //For each bit //Is least-significant-bit is set? if ( ( CRC & 0x0001 ) != 0 ) { CRC >>= 1; //Shift to right and xor CRC ^= 0xA001; } else // Else LSB is not set CRC >>= 1; } } return CRC; }
uint16_t MODBUSSwapEndian( uint16_t Data ) { //Change big-endian to little-endian and vice versa uint8_t Swap; //Create 2 bytes long union union Conversion { uint16_t Data; uint8_t Bytes[2]; } Conversion; //Swap bytes Conversion.Data = Data; Swap = Conversion.Bytes[0]; Conversion.Bytes[0] = Conversion.Bytes[1]; Conversion.Bytes[1] = Swap; return Conversion.Data; } uint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length ) { //Calculate CRC16 checksum using given data and length uint16_t CRC = 0xFFFF; uint16_t i; uint8_t j; for ( i = 0; i < Length; i++ ) { CRC ^= Data[i]; //XOR current data byte with CRC value for ( j = 8; j != 0; j-- ) { //For each bit //Is least-significant-bit is set? if ( ( CRC & 0x0001 ) != 0 ) { CRC >>= 1; //Shift to right and xor CRC ^= 0xA001; } else // Else LSB is not set CRC >>= 1; } } return CRC; }
Change 'unsigned character' type variables to 'uint8_t'
Change 'unsigned character' type variables to 'uint8_t'
C
mit
Jacajack/modlib
c
## Code Before: uint16_t MODBUSSwapEndian( uint16_t Data ) { //Change big-endian to little-endian and vice versa unsigned char Swap; //Create 2 bytes long union union Conversion { uint16_t Data; unsigned char Bytes[2]; } Conversion; //Swap bytes Conversion.Data = Data; Swap = Conversion.Bytes[0]; Conversion.Bytes[0] = Conversion.Bytes[1]; Conversion.Bytes[1] = Swap; return Conversion.Data; } uint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length ) { //Calculate CRC16 checksum using given data and length uint16_t CRC = 0xFFFF; uint16_t i; unsigned char j; for ( i = 0; i < Length; i++ ) { CRC ^= Data[i]; //XOR current data byte with CRC value for ( j = 8; j != 0; j-- ) { //For each bit //Is least-significant-bit is set? if ( ( CRC & 0x0001 ) != 0 ) { CRC >>= 1; //Shift to right and xor CRC ^= 0xA001; } else // Else LSB is not set CRC >>= 1; } } return CRC; } ## Instruction: Change 'unsigned character' type variables to 'uint8_t' ## Code After: uint16_t MODBUSSwapEndian( uint16_t Data ) { //Change big-endian to little-endian and vice versa uint8_t Swap; //Create 2 bytes long union union Conversion { uint16_t Data; uint8_t Bytes[2]; } Conversion; //Swap bytes Conversion.Data = Data; Swap = Conversion.Bytes[0]; Conversion.Bytes[0] = Conversion.Bytes[1]; Conversion.Bytes[1] = Swap; return Conversion.Data; } uint16_t MODBUSCRC16( uint16_t *Data, uint16_t Length ) { //Calculate CRC16 checksum using given data and length uint16_t CRC = 0xFFFF; uint16_t i; uint8_t j; for ( i = 0; i < Length; i++ ) { CRC ^= Data[i]; //XOR current data byte with CRC value for ( j = 8; j != 0; j-- ) { //For each bit //Is least-significant-bit is set? if ( ( CRC & 0x0001 ) != 0 ) { CRC >>= 1; //Shift to right and xor CRC ^= 0xA001; } else // Else LSB is not set CRC >>= 1; } } return CRC; }
... { //Change big-endian to little-endian and vice versa uint8_t Swap; //Create 2 bytes long union union Conversion { uint16_t Data; uint8_t Bytes[2]; } Conversion; //Swap bytes ... uint16_t CRC = 0xFFFF; uint16_t i; uint8_t j; for ( i = 0; i < Length; i++ ) { ...
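The loop in this record is the reflected CRC-16 used by Modbus (polynomial 0xA001, initial value 0xFFFF). Below is an illustrative Python transcription of the same bit loop (not part of modlib), applied to bytes, which is how a Modbus frame checksum is normally computed; the C signature in the record instead takes uint16_t values.

```python
def crc16_modbus(data: bytes) -> int:
    crc = 0xFFFF
    for byte in data:
        crc ^= byte                      # XOR current byte into the CRC
        for _ in range(8):               # process each bit
            if crc & 0x0001:             # least-significant bit set?
                crc = (crc >> 1) ^ 0xA001
            else:
                crc >>= 1
    return crc

# Standard check value for CRC-16/MODBUS over the ASCII digits "123456789".
assert crc16_modbus(b"123456789") == 0x4B37
print(hex(crc16_modbus(b"123456789")))   # 0x4b37
```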
09f649ac0b14269067c43df9f879d963ab99cdac
backend/breach/views.py
backend/breach/views.py
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return HttpResponse(json.dumps(new_work), content_type='application/json') @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return JsonResponse(new_work) @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
Fix response with json for get_work
Fix response with json for get_work
Python
mit
dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture,dimriou/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,esarafianou/rupture
python
## Code Before: import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return HttpResponse(json.dumps(new_work), content_type='application/json') @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory }) ## Instruction: Fix response with json for get_work ## Code After: import json from django.http import Http404, JsonResponse from django.views.decorators.csrf import csrf_exempt from breach.strategy import Strategy from breach.models import Victim def get_work(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) # Example work structure: # return {'url': 'https://www.dimkarakostas.com/?breach-test', # 'amount': 10, # 'timeout': 0} new_work = strategy.get_work() return JsonResponse(new_work) @csrf_exempt def work_completed(request, victim_id=0): assert(victim_id) try: victim = Victim.objects.get(pk=victim_id) except: raise Http404('Victim not found') strategy = Strategy(victim) victory = strategy.work_completed() return JsonResponse({ 'victory': victory })
... new_work = strategy.get_work() return JsonResponse(new_work) @csrf_exempt def work_completed(request, victim_id=0): ...
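The fix replaces a hand-rolled HttpResponse(json.dumps(...)) with JsonResponse, which serializes and sets the content type in one step. The snippet below shows the two producing equivalent bodies; settings.configure() is only there so it runs outside a Django project, and it assumes Django is installed.

```python
import json
from django.conf import settings

settings.configure()   # minimal settings so the snippet runs standalone

from django.http import HttpResponse, JsonResponse

new_work = {"url": "https://example.test/?breach-test", "amount": 10, "timeout": 0}

manual = HttpResponse(json.dumps(new_work), content_type="application/json")
concise = JsonResponse(new_work)

assert json.loads(manual.content) == json.loads(concise.content)
print(concise["Content-Type"])   # application/json
```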
77f0c9dcf7fdc4ebe904c32b64fbcb5c9c1f4d6b
src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java
src/main/java/com/googlecode/pngtastic/core/PngChunkInserter.java
package com.googlecode.pngtastic.core; import java.io.ByteArrayOutputStream; import java.io.IOException; /** * Usage: * <code> * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk); * final File exported = image.export(toDir + "/name.png", bytes); * </code> * * @author ray */ public class PngChunkInserter { private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 }; public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300); public byte[] insert(PngImage image, PngChunk chunk) throws IOException { // add it after the header chunk image.getChunks().add(1, chunk); final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); image.writeDataOutputStream(outputBytes); return outputBytes.toByteArray(); } }
package com.googlecode.pngtastic.core; import java.io.ByteArrayOutputStream; import java.io.IOException; /** * Usage: * <code> * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk); * final File exported = image.export(toDir + "/name.png", bytes); * </code> * * @author ray */ public class PngChunkInserter { /** * Conversion note: one inch is equal to exactly 0.0254 meters. * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 } * http://comments.gmane.org/gmane.comp.graphics.png.general/2425 */ private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 }; public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300); public byte[] insert(PngImage image, PngChunk chunk) throws IOException { // add it after the header chunk image.getChunks().add(1, chunk); final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); image.writeDataOutputStream(outputBytes); return outputBytes.toByteArray(); } }
Add comment describing dpi conversion
Add comment describing dpi conversion
Java
mit
depsypher/pngtastic
java
## Code Before: package com.googlecode.pngtastic.core; import java.io.ByteArrayOutputStream; import java.io.IOException; /** * Usage: * <code> * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk); * final File exported = image.export(toDir + "/name.png", bytes); * </code> * * @author ray */ public class PngChunkInserter { private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 }; public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300); public byte[] insert(PngImage image, PngChunk chunk) throws IOException { // add it after the header chunk image.getChunks().add(1, chunk); final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); image.writeDataOutputStream(outputBytes); return outputBytes.toByteArray(); } } ## Instruction: Add comment describing dpi conversion ## Code After: package com.googlecode.pngtastic.core; import java.io.ByteArrayOutputStream; import java.io.IOException; /** * Usage: * <code> * byte[] bytes = new PngChunkInserter().insert(image, PngChunkInserter.dpi300Chunk); * final File exported = image.export(toDir + "/name.png", bytes); * </code> * * @author ray */ public class PngChunkInserter { /** * Conversion note: one inch is equal to exactly 0.0254 meters. * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 } * http://comments.gmane.org/gmane.comp.graphics.png.general/2425 */ private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 }; public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300); public byte[] insert(PngImage image, PngChunk chunk) throws IOException { // add it after the header chunk image.getChunks().add(1, chunk); final ByteArrayOutputStream outputBytes = new ByteArrayOutputStream(); image.writeDataOutputStream(outputBytes); return outputBytes.toByteArray(); } }
// ... existing code ... */ public class PngChunkInserter { /** * Conversion note: one inch is equal to exactly 0.0254 meters. * 300dpi = 300 / 0.0254 = 11,811.023622 = 11811 = 0x2E23 = new byte[] { 0, 0, 46, 35 } * http://comments.gmane.org/gmane.comp.graphics.png.general/2425 */ private static final byte[] dpi300 = new byte[] { 0, 0, 46, 35, 0, 0, 46, 35, 1 }; public static final PngChunk dpi300Chunk = new PngChunk(PngChunk.PHYSICAL_PIXEL_DIMENSIONS.getBytes(), dpi300); // ... rest of the code ...
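The comment being added encodes one piece of arithmetic: 300 dpi expressed in pixels per metre, reduced to an integer and stored big-endian in the PNG physical-pixel-dimensions (pHYs) chunk. A quick check of those numbers:

```python
import struct

dpi = 300
pixels_per_metre = round(dpi / 0.0254)            # one inch is exactly 0.0254 m
print(pixels_per_metre)                            # 11811
print(hex(pixels_per_metre))                       # 0x2e23
print(list(struct.pack(">I", pixels_per_metre)))   # [0, 0, 46, 35]
```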
99c06cff63c4dc661b5cffc5a80f79327269684a
c/anagrams.c
c/anagrams.c
void swap(char *i, char *j) { char saved = *i; *i = *j; *j = saved; } void generate_permutations(char* a, int n) { if (n == 0) { printf("%s\n", a); } else { for (int i = 0; i < n; i++) { generate_permutations(a, n-1); swap(&a[n % 2 == 0 ? 0 : i], &a[n]); } generate_permutations(a, n-1); } } int main(int argc, const char* argv[]) { if (argc != 2) { fprintf(stderr, "Need exactly one argument!\n"); return 1; } size_t len = strlen(argv[1]); char *word = malloc(len + 1); word = strncpy(word, argv[1], len); generate_permutations(word, len-1); free(word); return 0; }
void swap(char *i, char *j) { char saved = *i; *i = *j; *j = saved; } void generate_permutations(char* a, int n) { if (n == 0) { printf("%s\n", a); } else { for (int i = 0; i < n; i++) { generate_permutations(a, n-1); swap(&a[n % 2 == 0 ? 0 : i], &a[n]); } generate_permutations(a, n-1); } } int main(int argc, const char* argv[]) { if (argc != 2) { fprintf(stderr, "Exactly one argument is required\n"); return 1; } size_t len = strlen(argv[1]); char *word = malloc(len + 1); word = strncpy(word, argv[1], len); generate_permutations(word, len-1); free(word); return 0; }
Make the error message consistent with all of the other languages
Make the error message consistent with all of the other languages
C
mit
rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot
c
## Code Before: void swap(char *i, char *j) { char saved = *i; *i = *j; *j = saved; } void generate_permutations(char* a, int n) { if (n == 0) { printf("%s\n", a); } else { for (int i = 0; i < n; i++) { generate_permutations(a, n-1); swap(&a[n % 2 == 0 ? 0 : i], &a[n]); } generate_permutations(a, n-1); } } int main(int argc, const char* argv[]) { if (argc != 2) { fprintf(stderr, "Need exactly one argument!\n"); return 1; } size_t len = strlen(argv[1]); char *word = malloc(len + 1); word = strncpy(word, argv[1], len); generate_permutations(word, len-1); free(word); return 0; } ## Instruction: Make the error message consistent with all of the other languages ## Code After: void swap(char *i, char *j) { char saved = *i; *i = *j; *j = saved; } void generate_permutations(char* a, int n) { if (n == 0) { printf("%s\n", a); } else { for (int i = 0; i < n; i++) { generate_permutations(a, n-1); swap(&a[n % 2 == 0 ? 0 : i], &a[n]); } generate_permutations(a, n-1); } } int main(int argc, const char* argv[]) { if (argc != 2) { fprintf(stderr, "Exactly one argument is required\n"); return 1; } size_t len = strlen(argv[1]); char *word = malloc(len + 1); word = strncpy(word, argv[1], len); generate_permutations(word, len-1); free(word); return 0; }
... int main(int argc, const char* argv[]) { if (argc != 2) { fprintf(stderr, "Exactly one argument is required\n"); return 1; } ...
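The recursion in anagrams.c is Heap's algorithm: n is the index of the last element still in play, and the element swapped with position n alternates between index 0 and index i depending on the parity of n. A direct Python transcription (illustrative, not part of the repo) makes the control flow easier to follow:

```python
def generate_permutations(a, n, out):
    if n == 0:
        out.append("".join(a))
    else:
        for i in range(n):
            generate_permutations(a, n - 1, out)
            j = 0 if n % 2 == 0 else i      # same parity rule as the C swap
            a[j], a[n] = a[n], a[j]
        generate_permutations(a, n - 1, out)

word = list("abc")
results = []
generate_permutations(word, len(word) - 1, results)
print(results)                   # six orderings of "abc"
assert len(set(results)) == 6
```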
377ff1dabc595fab4af9cb35bb8c071f2872c612
agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java
agile-dbus-java-interface/src/main/java/iot/agile/object/DeviceStatusType.java
package iot.agile.object; public enum DeviceStatusType { CONNECTED, DISCONNECTED, ERROR }
package iot.agile.object; public enum DeviceStatusType { CONNECTED, DISCONNECTED, ON, OFF, ERROR }
Add more device status type
Add more device status type
Java
epl-1.0
muka/agile-api-spec,muka/agile-api-spec,muka/agile-api-spec
java
## Code Before: package iot.agile.object; public enum DeviceStatusType { CONNECTED, DISCONNECTED, ERROR } ## Instruction: Add more device status type ## Code After: package iot.agile.object; public enum DeviceStatusType { CONNECTED, DISCONNECTED, ON, OFF, ERROR }
# ... existing code ... public enum DeviceStatusType { CONNECTED, DISCONNECTED, ON, OFF, ERROR } # ... rest of the code ...
243adb38e3d4f61404f4df14a9a5aa18af8638d9
app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java
app/src/main/java/de/philipphager/disclosure/feature/analyser/app/Apk.java
package de.philipphager.disclosure.feature.analyser.app; import dalvik.system.DexFile; import de.philipphager.disclosure.database.app.model.App; import java.io.IOException; import java.util.Collections; import java.util.List; import rx.Observable; import timber.log.Timber; public class Apk { private static final int MIN_INDEX = 0; private final App app; private List<String> sortedClassNames; public Apk(App app) throws IOException { this.app = app; load(); } private void load() throws IOException { DexFile dexFile = new DexFile(app.sourceDir()); List<String> classNames = Collections.list(dexFile.entries()); sortedClassNames = Observable.from(classNames) .distinct() .toSortedList() .toBlocking() .first(); } public boolean containsPackage(String packageName) { String currentThread = Thread.currentThread().getName(); Timber.d("%s : Searching for package %s in app %s", currentThread, packageName, app.label()); int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> { if(currentItem.startsWith(key)) { return 0; } return currentItem.compareTo(key); }); return index >= MIN_INDEX; } }
package de.philipphager.disclosure.feature.analyser.app; import dalvik.system.DexFile; import de.philipphager.disclosure.database.app.model.App; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import rx.Observable; import timber.log.Timber; public class Apk { private static final int MIN_INDEX = 0; private final App app; private List<String> sortedClassNames; public Apk(App app) throws IOException { this.app = app; load(); } private void load() throws IOException { File file = new File(app.sourceDir()); List<String> classNames = new ArrayList<>(); if (file.exists()) { DexFile dexFile = new DexFile(app.sourceDir()); classNames.addAll(Collections.list(dexFile.entries())); } sortedClassNames = Observable.from(classNames) .distinct() .toSortedList() .toBlocking() .first(); } public boolean containsPackage(String packageName) { String currentThread = Thread.currentThread().getName(); Timber.d("%s : Searching for package %s in app %s", currentThread, packageName, app.label()); int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> { if (currentItem.startsWith(key)) { return 0; } return currentItem.compareTo(key); }); return index >= MIN_INDEX; } }
Fix not existing .apks failing
Fix not existing .apks failing
Java
apache-2.0
philipphager/disclosure-android-app
java
## Code Before: package de.philipphager.disclosure.feature.analyser.app; import dalvik.system.DexFile; import de.philipphager.disclosure.database.app.model.App; import java.io.IOException; import java.util.Collections; import java.util.List; import rx.Observable; import timber.log.Timber; public class Apk { private static final int MIN_INDEX = 0; private final App app; private List<String> sortedClassNames; public Apk(App app) throws IOException { this.app = app; load(); } private void load() throws IOException { DexFile dexFile = new DexFile(app.sourceDir()); List<String> classNames = Collections.list(dexFile.entries()); sortedClassNames = Observable.from(classNames) .distinct() .toSortedList() .toBlocking() .first(); } public boolean containsPackage(String packageName) { String currentThread = Thread.currentThread().getName(); Timber.d("%s : Searching for package %s in app %s", currentThread, packageName, app.label()); int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> { if(currentItem.startsWith(key)) { return 0; } return currentItem.compareTo(key); }); return index >= MIN_INDEX; } } ## Instruction: Fix not existing .apks failing ## Code After: package de.philipphager.disclosure.feature.analyser.app; import dalvik.system.DexFile; import de.philipphager.disclosure.database.app.model.App; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import rx.Observable; import timber.log.Timber; public class Apk { private static final int MIN_INDEX = 0; private final App app; private List<String> sortedClassNames; public Apk(App app) throws IOException { this.app = app; load(); } private void load() throws IOException { File file = new File(app.sourceDir()); List<String> classNames = new ArrayList<>(); if (file.exists()) { DexFile dexFile = new DexFile(app.sourceDir()); classNames.addAll(Collections.list(dexFile.entries())); } sortedClassNames = Observable.from(classNames) .distinct() .toSortedList() .toBlocking() .first(); } public boolean containsPackage(String packageName) { String currentThread = Thread.currentThread().getName(); Timber.d("%s : Searching for package %s in app %s", currentThread, packageName, app.label()); int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> { if (currentItem.startsWith(key)) { return 0; } return currentItem.compareTo(key); }); return index >= MIN_INDEX; } }
# ... existing code ... import dalvik.system.DexFile; import de.philipphager.disclosure.database.app.model.App; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import rx.Observable; # ... modified code ... } private void load() throws IOException { File file = new File(app.sourceDir()); List<String> classNames = new ArrayList<>(); if (file.exists()) { DexFile dexFile = new DexFile(app.sourceDir()); classNames.addAll(Collections.list(dexFile.entries())); } sortedClassNames = Observable.from(classNames) .distinct() ... Timber.d("%s : Searching for package %s in app %s", currentThread, packageName, app.label()); int index = Collections.binarySearch(sortedClassNames, packageName, (currentItem, key) -> { if (currentItem.startsWith(key)) { return 0; } return currentItem.compareTo(key); # ... rest of the code ...
03cab833e3d1a4117adc763620f7a1c78ee2cc95
shim-server/src/test/java/org/openmhealth/shim/common/mapper/DataPointMapperUnitTests.java
package org.openmhealth.shim.common.mapper; import com.fasterxml.jackson.databind.ObjectMapper; import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper; /** * @author Emerson Farrugia */ public abstract class DataPointMapperUnitTests { protected static final ObjectMapper objectMapper = newObjectMapper(); }
package org.openmhealth.shim.common.mapper; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.springframework.core.io.ClassPathResource; import java.io.IOException; import java.io.InputStream; import static java.lang.String.format; import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper; /** * @author Emerson Farrugia */ public abstract class DataPointMapperUnitTests { protected static final ObjectMapper objectMapper = newObjectMapper(); /** * @param classPathResourceName the name of the class path resource to load * @return the contents of the resource as a {@link JsonNode} * @throws RuntimeException if the resource can't be loaded */ protected JsonNode asJsonNode(String classPathResourceName) { ClassPathResource resource = new ClassPathResource(classPathResourceName); try { InputStream resourceInputStream = resource.getInputStream(); return objectMapper.readTree(resourceInputStream); } catch (IOException e) { throw new RuntimeException( format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e); } } }
Add unit test support to load class path resources as JSON nodes
Java
apache-2.0
openmhealth/shimmer
java
## Code Before: package org.openmhealth.shim.common.mapper; import com.fasterxml.jackson.databind.ObjectMapper; import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper; /** * @author Emerson Farrugia */ public abstract class DataPointMapperUnitTests { protected static final ObjectMapper objectMapper = newObjectMapper(); } ## Instruction: Add unit test support to load class path resources as JSON nodes ## Code After: package org.openmhealth.shim.common.mapper; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.springframework.core.io.ClassPathResource; import java.io.IOException; import java.io.InputStream; import static java.lang.String.format; import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper; /** * @author Emerson Farrugia */ public abstract class DataPointMapperUnitTests { protected static final ObjectMapper objectMapper = newObjectMapper(); /** * @param classPathResourceName the name of the class path resource to load * @return the contents of the resource as a {@link JsonNode} * @throws RuntimeException if the resource can't be loaded */ protected JsonNode asJsonNode(String classPathResourceName) { ClassPathResource resource = new ClassPathResource(classPathResourceName); try { InputStream resourceInputStream = resource.getInputStream(); return objectMapper.readTree(resourceInputStream); } catch (IOException e) { throw new RuntimeException( format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e); } } }
# ... existing code ... package org.openmhealth.shim.common.mapper; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.springframework.core.io.ClassPathResource; import java.io.IOException; import java.io.InputStream; import static java.lang.String.format; import static org.openmhealth.schema.configuration.JacksonConfiguration.newObjectMapper; # ... modified code ... public abstract class DataPointMapperUnitTests { protected static final ObjectMapper objectMapper = newObjectMapper(); /** * @param classPathResourceName the name of the class path resource to load * @return the contents of the resource as a {@link JsonNode} * @throws RuntimeException if the resource can't be loaded */ protected JsonNode asJsonNode(String classPathResourceName) { ClassPathResource resource = new ClassPathResource(classPathResourceName); try { InputStream resourceInputStream = resource.getInputStream(); return objectMapper.readTree(resourceInputStream); } catch (IOException e) { throw new RuntimeException( format("The class path resource '%s' can't be loaded as a JSON node.", classPathResourceName), e); } } } # ... rest of the code ...
50089e4a1d55414e0ae88b1699eeca0980bcfc15
org.metaborg.meta.lang.dynsem.interpreter/src/main/java/org/metaborg/meta/lang/dynsem/interpreter/nodes/matching/LiteralMatchPattern.java
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching; import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil; import org.spoofax.interpreter.core.Tools; import org.spoofax.interpreter.terms.IStrategoAppl; import org.spoofax.terms.util.NotImplementedException; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.source.SourceSection; public abstract class LiteralMatchPattern extends MatchPattern { public LiteralMatchPattern(SourceSection source) { super(source); } public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) { SourceSection source = SourceSectionUtil.fromStrategoTerm(t); if(Tools.hasConstructor(t, "True",0)){ return new TrueLiteralTermMatchPattern(source); } if(Tools.hasConstructor(t, "False",0)){ return new FalseLiteralTermMatchPattern(source); } if(Tools.hasConstructor(t, "Int", 1)){ return new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source); } if(Tools.hasConstructor(t, "String", 1)){ return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source); } throw new NotImplementedException("Unsupported literal: " + t); } }
package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching; import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil; import org.spoofax.interpreter.core.Tools; import org.spoofax.interpreter.terms.IStrategoAppl; import org.spoofax.terms.util.NotImplementedException; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.source.SourceSection; public abstract class LiteralMatchPattern extends MatchPattern { public LiteralMatchPattern(SourceSection source) { super(source); } public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) { SourceSection source = SourceSectionUtil.fromStrategoTerm(t); if (Tools.hasConstructor(t, "True", 0)) { return new TrueLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "False", 0)) { return new FalseLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "Int", 1)) { return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source); } if (Tools.hasConstructor(t, "String", 1)) { return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source); } throw new NotImplementedException("Unsupported literal: " + t); } }
Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings.
Java
apache-2.0
metaborg/dynsem
java
## Code Before: package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching; import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil; import org.spoofax.interpreter.core.Tools; import org.spoofax.interpreter.terms.IStrategoAppl; import org.spoofax.terms.util.NotImplementedException; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.source.SourceSection; public abstract class LiteralMatchPattern extends MatchPattern { public LiteralMatchPattern(SourceSection source) { super(source); } public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) { SourceSection source = SourceSectionUtil.fromStrategoTerm(t); if(Tools.hasConstructor(t, "True",0)){ return new TrueLiteralTermMatchPattern(source); } if(Tools.hasConstructor(t, "False",0)){ return new FalseLiteralTermMatchPattern(source); } if(Tools.hasConstructor(t, "Int", 1)){ return new IntLiteralTermMatchPattern(Tools.intAt(t, 0).intValue(), source); } if(Tools.hasConstructor(t, "String", 1)){ return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source); } throw new NotImplementedException("Unsupported literal: " + t); } } ## Instruction: Fix literal pattern matching creation to correctly convert integer literals in the specification which are encoded as strings. ## Code After: package org.metaborg.meta.lang.dynsem.interpreter.nodes.matching; import org.metaborg.meta.lang.dynsem.interpreter.utils.SourceSectionUtil; import org.spoofax.interpreter.core.Tools; import org.spoofax.interpreter.terms.IStrategoAppl; import org.spoofax.terms.util.NotImplementedException; import com.oracle.truffle.api.frame.FrameDescriptor; import com.oracle.truffle.api.source.SourceSection; public abstract class LiteralMatchPattern extends MatchPattern { public LiteralMatchPattern(SourceSection source) { super(source); } public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) { SourceSection source = SourceSectionUtil.fromStrategoTerm(t); if (Tools.hasConstructor(t, "True", 0)) { return new TrueLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "False", 0)) { return new FalseLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "Int", 1)) { return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source); } if (Tools.hasConstructor(t, "String", 1)) { return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source); } throw new NotImplementedException("Unsupported literal: " + t); } }
# ... existing code ... public static LiteralMatchPattern create(IStrategoAppl t, FrameDescriptor fd) { SourceSection source = SourceSectionUtil.fromStrategoTerm(t); if (Tools.hasConstructor(t, "True", 0)) { return new TrueLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "False", 0)) { return new FalseLiteralTermMatchPattern(source); } if (Tools.hasConstructor(t, "Int", 1)) { return new IntLiteralTermMatchPattern(Integer.parseInt(Tools.stringAt(t, 0).stringValue()), source); } if (Tools.hasConstructor(t, "String", 1)) { return new StringLiteralTermMatchPattern(Tools.stringAt(t, 0).stringValue(), source); } throw new NotImplementedException("Unsupported literal: " + t); } } # ... rest of the code ...
2ba5f562edb568653574d329a9f1ffbe8b15e7c5
tests/test_caching.py
import os import tempfile from . import RTRSSTestCase from rtrss import caching, config class CachingTestCase(RTRSSTestCase): def setUp(self): fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR) os.close(fh) def tearDown(self): os.remove(self.filename) def test_open_for_atomic_write_writes(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data) def test_atomic_write_really_atomic(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename, 'w') as f1: f1.write('this will be overwritten') with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data)
import os import tempfile from . import TempDirTestCase from rtrss import caching class CachingTestCase(TempDirTestCase): def setUp(self): super(CachingTestCase, self).setUp() fh, self.filename = tempfile.mkstemp(dir=self.dir.path) os.close(fh) def tearDown(self): os.remove(self.filename) super(CachingTestCase, self).tearDown() def test_open_for_atomic_write_writes(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data) def test_atomic_write_really_atomic(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename, 'w') as f1: f1.write('this will be overwritten') with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data)
Update test case to use new base class
Python
apache-2.0
notapresent/rtrss
python
## Code Before: import os import tempfile from . import RTRSSTestCase from rtrss import caching, config class CachingTestCase(RTRSSTestCase): def setUp(self): fh, self.filename = tempfile.mkstemp(dir=config.DATA_DIR) os.close(fh) def tearDown(self): os.remove(self.filename) def test_open_for_atomic_write_writes(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data) def test_atomic_write_really_atomic(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename, 'w') as f1: f1.write('this will be overwritten') with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data) ## Instruction: Update test case to use new base class ## Code After: import os import tempfile from . import TempDirTestCase from rtrss import caching class CachingTestCase(TempDirTestCase): def setUp(self): super(CachingTestCase, self).setUp() fh, self.filename = tempfile.mkstemp(dir=self.dir.path) os.close(fh) def tearDown(self): os.remove(self.filename) super(CachingTestCase, self).tearDown() def test_open_for_atomic_write_writes(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data) def test_atomic_write_really_atomic(self): test_data = 'test' with caching.open_for_atomic_write(self.filename) as f: f.write(test_data) with open(self.filename, 'w') as f1: f1.write('this will be overwritten') with open(self.filename) as f: data = f.read() self.assertEqual(test_data, data)
// ... existing code ... import os import tempfile from . import TempDirTestCase from rtrss import caching class CachingTestCase(TempDirTestCase): def setUp(self): super(CachingTestCase, self).setUp() fh, self.filename = tempfile.mkstemp(dir=self.dir.path) os.close(fh) def tearDown(self): os.remove(self.filename) super(CachingTestCase, self).tearDown() def test_open_for_atomic_write_writes(self): test_data = 'test' // ... rest of the code ...
61b5bc8a7e81225a83d195e016bc4adbd7ca1db5
setup.py
from setuptools import setup, find_packages setup( name='pymediainfo', version='2.1.5', author='Louis Sautier', author_email='[email protected]', url='https://github.com/sbraz/pymediainfo', description="""A Python wrapper for the mediainfo library.""", packages=find_packages(), namespace_packages=[], include_package_data=True, zip_safe=False, license='MIT', tests_require=["nose"], test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "License :: OSI Approved :: MIT License", ] )
from setuptools import setup, find_packages setup( name='pymediainfo', version='2.1.5', author='Louis Sautier', author_email='[email protected]', url='https://github.com/sbraz/pymediainfo', description="""A Python wrapper for the mediainfo library.""", packages=find_packages(), namespace_packages=[], include_package_data=True, zip_safe=False, license='MIT', tests_require=["nose"], test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "License :: OSI Approved :: MIT License", ] )
Add Python 2.6 to classifiers
Python
mit
paltman/pymediainfo,paltman-archive/pymediainfo
python
## Code Before: from setuptools import setup, find_packages setup( name='pymediainfo', version='2.1.5', author='Louis Sautier', author_email='[email protected]', url='https://github.com/sbraz/pymediainfo', description="""A Python wrapper for the mediainfo library.""", packages=find_packages(), namespace_packages=[], include_package_data=True, zip_safe=False, license='MIT', tests_require=["nose"], test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "License :: OSI Approved :: MIT License", ] ) ## Instruction: Add Python 2.6 to classifiers ## Code After: from setuptools import setup, find_packages setup( name='pymediainfo', version='2.1.5', author='Louis Sautier', author_email='[email protected]', url='https://github.com/sbraz/pymediainfo', description="""A Python wrapper for the mediainfo library.""", packages=find_packages(), namespace_packages=[], include_package_data=True, zip_safe=False, license='MIT', tests_require=["nose"], test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "License :: OSI Approved :: MIT License", ] )
// ... existing code ... test_suite="nose.collector", classifiers=[ "Development Status :: 5 - Production/Stable", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", // ... rest of the code ...
94996a2a78743020945bf708616e202e7b988173
aconite-core/src/io/aconite/serializers/GsonBodySerializer.kt
package io.aconite.serializers import com.google.gson.Gson import com.google.gson.GsonBuilder import com.google.gson.JsonParseException import io.aconite.BadRequestException import io.aconite.UnsupportedMediaTypeException import io.aconite.BodyBuffer import io.aconite.BodySerializer import io.aconite.Buffer import io.aconite.utils.toJavaType import java.lang.reflect.Type import kotlin.reflect.KAnnotatedElement import kotlin.reflect.KType class GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer { class Factory(val gson: Gson = Gson()): BodySerializer.Factory { constructor(builder: GsonBuilder): this(builder.create()) override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType()) } override fun serialize(obj: Any?) = BodyBuffer( content = Buffer.wrap(gson.toJson(obj, type)), contentType = "application/json" ) override fun deserialize(body: BodyBuffer): Any? { if (body.contentType.toLowerCase() != "application/json") throw UnsupportedMediaTypeException("Only 'application/json' media type supported") try { return gson.fromJson(body.content.string, type) } catch (ex: JsonParseException) { throw BadRequestException("Bad JSON format. ${ex.message}") } } }
package io.aconite.serializers import com.google.gson.Gson import com.google.gson.GsonBuilder import com.google.gson.JsonParseException import io.aconite.* import io.aconite.utils.toJavaType import java.lang.reflect.Type import kotlin.reflect.KAnnotatedElement import kotlin.reflect.KType class GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer { class Factory(val gson: Gson = Gson()): BodySerializer.Factory { constructor(builder: GsonBuilder): this(builder.create()) override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType()) } override fun serialize(obj: Any?) = BodyBuffer( content = Buffer.wrap(gson.toJson(obj, type)), contentType = "application/json" ) override fun deserialize(body: BodyBuffer): Any? { if (body.content.bytes.isEmpty()) return null if (body.contentType.toLowerCase() != "application/json") throw UnsupportedMediaTypeException("Only 'application/json' media type supported") try { return gson.fromJson(body.content.string, type) } catch (ex: JsonParseException) { throw BadRequestException("Bad JSON format. ${ex.message}") } } }
Add support for empty body without contentType specification
Kotlin
mit
AcapellaSoft/Aconite
kotlin
## Code Before: package io.aconite.serializers import com.google.gson.Gson import com.google.gson.GsonBuilder import com.google.gson.JsonParseException import io.aconite.BadRequestException import io.aconite.UnsupportedMediaTypeException import io.aconite.BodyBuffer import io.aconite.BodySerializer import io.aconite.Buffer import io.aconite.utils.toJavaType import java.lang.reflect.Type import kotlin.reflect.KAnnotatedElement import kotlin.reflect.KType class GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer { class Factory(val gson: Gson = Gson()): BodySerializer.Factory { constructor(builder: GsonBuilder): this(builder.create()) override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType()) } override fun serialize(obj: Any?) = BodyBuffer( content = Buffer.wrap(gson.toJson(obj, type)), contentType = "application/json" ) override fun deserialize(body: BodyBuffer): Any? { if (body.contentType.toLowerCase() != "application/json") throw UnsupportedMediaTypeException("Only 'application/json' media type supported") try { return gson.fromJson(body.content.string, type) } catch (ex: JsonParseException) { throw BadRequestException("Bad JSON format. ${ex.message}") } } } ## Instruction: Add support for empty body without contentType specification ## Code After: package io.aconite.serializers import com.google.gson.Gson import com.google.gson.GsonBuilder import com.google.gson.JsonParseException import io.aconite.* import io.aconite.utils.toJavaType import java.lang.reflect.Type import kotlin.reflect.KAnnotatedElement import kotlin.reflect.KType class GsonBodySerializer(val gson: Gson, val type: Type): BodySerializer { class Factory(val gson: Gson = Gson()): BodySerializer.Factory { constructor(builder: GsonBuilder): this(builder.create()) override fun create(annotations: KAnnotatedElement, type: KType) = GsonBodySerializer(gson, type.toJavaType()) } override fun serialize(obj: Any?) = BodyBuffer( content = Buffer.wrap(gson.toJson(obj, type)), contentType = "application/json" ) override fun deserialize(body: BodyBuffer): Any? { if (body.content.bytes.isEmpty()) return null if (body.contentType.toLowerCase() != "application/json") throw UnsupportedMediaTypeException("Only 'application/json' media type supported") try { return gson.fromJson(body.content.string, type) } catch (ex: JsonParseException) { throw BadRequestException("Bad JSON format. ${ex.message}") } } }
// ... existing code ... import com.google.gson.Gson import com.google.gson.GsonBuilder import com.google.gson.JsonParseException import io.aconite.* import io.aconite.utils.toJavaType import java.lang.reflect.Type import kotlin.reflect.KAnnotatedElement // ... modified code ... ) override fun deserialize(body: BodyBuffer): Any? { if (body.content.bytes.isEmpty()) return null if (body.contentType.toLowerCase() != "application/json") throw UnsupportedMediaTypeException("Only 'application/json' media type supported") try { return gson.fromJson(body.content.string, type) } catch (ex: JsonParseException) { // ... rest of the code ...
2d64c01daebd918c3e6196b1eb3ad62f105c56e0
django_google_charts/charts.py
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
Make this Python 2.x compatible
Python
mit
danpalmer/django-google-charts
python
## Code Before: import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), ) ## Instruction: Make this Python 2.x compatible ## Code After: import six import json from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe from django.utils.encoding import python_2_unicode_compatible CHARTS = {} class ChartMeta(type): def __new__(cls, name, bases, attrs): klass = super(ChartMeta, cls).__new__(cls, name, bases, attrs) if klass.chart_slug: CHARTS[klass.chart_slug] = klass return klass @six.add_metaclass(ChartMeta) @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None columns = None def get_data(self): raise NotImplementedError def __str__(self): return format_html( "<div " "data-chart-options='{0}'" "data-chart-url='{1}'" "></div>", json.dumps(self.options), reverse( 'djgc-chart-data', args=(self.chart_slug,), ), )
// ... existing code ... from django.core.urlresolvers import reverse from django.utils.html import format_html, mark_safe from django.utils.encoding import python_2_unicode_compatible CHARTS = {} // ... modified code ... return klass @six.add_metaclass(ChartMeta) @python_2_unicode_compatible class Chart(object): options = {} chart_slug = None // ... rest of the code ...
a385490e82e3ac3f909fe2b407e692206212748b
main/src/main/java/com/bloatit/framework/Offer.java
package com.bloatit.framework; import java.util.Date; import com.bloatit.framework.right.OfferRight; import com.bloatit.framework.right.RightManager.Action; import com.bloatit.model.data.DaoComment; import com.bloatit.model.data.DaoKudosable; import com.bloatit.model.data.DaoOffer; public final class Offer extends Kudosable { private final DaoOffer dao; public static Offer create(final DaoOffer dao) { if (dao == null) { return null; } return new Offer(dao); } public Offer(final DaoOffer dao) { super(); this.dao = dao; } public DaoOffer getDao() { return dao; } public Date getDateExpire() { return dao.getDateExpire(); } public boolean canSetdatExpire() { return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE); } public void setDateExpire(final Date dateExpire) { new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE); dao.setDateExpire(dateExpire); } public Demand getDemand() { return Demand.create(dao.getDemand()); } public Description getDescription() { return new Description(dao.getDescription()); } @Override protected DaoKudosable getDaoKudosable() { return dao; } }
package com.bloatit.framework; import java.math.BigDecimal; import java.util.Date; import com.bloatit.framework.right.OfferRight; import com.bloatit.framework.right.RightManager.Action; import com.bloatit.model.data.DaoComment; import com.bloatit.model.data.DaoKudosable; import com.bloatit.model.data.DaoOffer; public final class Offer extends Kudosable { private final DaoOffer dao; public static Offer create(final DaoOffer dao) { if (dao == null) { return null; } return new Offer(dao); } public Offer(final DaoOffer dao) { super(); this.dao = dao; } public DaoOffer getDao() { return dao; } public Date getDateExpire() { return dao.getDateExpire(); } public boolean canSetdatExpire() { return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE); } public void setDateExpire(final Date dateExpire) { new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE); dao.setDateExpire(dateExpire); } public Demand getDemand() { return Demand.create(dao.getDemand()); } public Description getDescription() { return new Description(dao.getDescription()); } public BigDecimal getAmount() { return dao.getAmount(); } @Override protected DaoKudosable getDaoKudosable() { return dao; } }
Add get amount method in offer
Java
agpl-3.0
BloatIt/bloatit
java
## Code Before: package com.bloatit.framework; import java.util.Date; import com.bloatit.framework.right.OfferRight; import com.bloatit.framework.right.RightManager.Action; import com.bloatit.model.data.DaoComment; import com.bloatit.model.data.DaoKudosable; import com.bloatit.model.data.DaoOffer; public final class Offer extends Kudosable { private final DaoOffer dao; public static Offer create(final DaoOffer dao) { if (dao == null) { return null; } return new Offer(dao); } public Offer(final DaoOffer dao) { super(); this.dao = dao; } public DaoOffer getDao() { return dao; } public Date getDateExpire() { return dao.getDateExpire(); } public boolean canSetdatExpire() { return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE); } public void setDateExpire(final Date dateExpire) { new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE); dao.setDateExpire(dateExpire); } public Demand getDemand() { return Demand.create(dao.getDemand()); } public Description getDescription() { return new Description(dao.getDescription()); } @Override protected DaoKudosable getDaoKudosable() { return dao; } } ## Instruction: Add get amount method in offer ## Code After: package com.bloatit.framework; import java.math.BigDecimal; import java.util.Date; import com.bloatit.framework.right.OfferRight; import com.bloatit.framework.right.RightManager.Action; import com.bloatit.model.data.DaoComment; import com.bloatit.model.data.DaoKudosable; import com.bloatit.model.data.DaoOffer; public final class Offer extends Kudosable { private final DaoOffer dao; public static Offer create(final DaoOffer dao) { if (dao == null) { return null; } return new Offer(dao); } public Offer(final DaoOffer dao) { super(); this.dao = dao; } public DaoOffer getDao() { return dao; } public Date getDateExpire() { return dao.getDateExpire(); } public boolean canSetdatExpire() { return new OfferRight.DateExpire().canAccess(calculateRole(this), Action.WRITE); } public void setDateExpire(final Date dateExpire) { new OfferRight.DateExpire().tryAccess(calculateRole(this), Action.WRITE); dao.setDateExpire(dateExpire); } public Demand getDemand() { return Demand.create(dao.getDemand()); } public Description getDescription() { return new Description(dao.getDescription()); } public BigDecimal getAmount() { return dao.getAmount(); } @Override protected DaoKudosable getDaoKudosable() { return dao; } }
# ... existing code ... package com.bloatit.framework; import java.math.BigDecimal; import java.util.Date; import com.bloatit.framework.right.OfferRight; # ... modified code ... return new Description(dao.getDescription()); } public BigDecimal getAmount() { return dao.getAmount(); } @Override protected DaoKudosable getDaoKudosable() { return dao; # ... rest of the code ...
4a98d2ce95d6a082588e4ccc8e04454c26260ca0
helpers.py
def get_readable_list(passed_list, sep=', ', end=''): output = "" if isinstance(passed_list, list): for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) elif isinstance(passed_list, dict): for i, item in enumerate(passed_list.values()): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) return output + end def get_list_as_english(passed_list): output = "" for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) + ' ' elif len(passed_list) is 2: output += str(item) if i is not (len(passed_list) - 1): output += " and " else: output += "" else: if i is not (len(passed_list) - 1): output += str(item) + ", " else: output += "and " + str(item) + ", " return output
def get_readable_list(passed_list, sep=', ', end=''): output = "" if isinstance(passed_list, list) or isinstance(passed_list, tuple): for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) elif isinstance(passed_list, dict): for i, item in enumerate(passed_list.values()): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) return output + end def get_list_as_english(passed_list): output = "" for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) + ' ' elif len(passed_list) is 2: output += str(item) if i is not (len(passed_list) - 1): output += " and " else: output += "" else: if i is not (len(passed_list) - 1): output += str(item) + ", " else: output += "and " + str(item) + ", " return output
Make get_readable_list process tuples, too
Python
agpl-3.0
hawkrives/gobbldygook
python
## Code Before: def get_readable_list(passed_list, sep=', ', end=''): output = "" if isinstance(passed_list, list): for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) elif isinstance(passed_list, dict): for i, item in enumerate(passed_list.values()): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) return output + end def get_list_as_english(passed_list): output = "" for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) + ' ' elif len(passed_list) is 2: output += str(item) if i is not (len(passed_list) - 1): output += " and " else: output += "" else: if i is not (len(passed_list) - 1): output += str(item) + ", " else: output += "and " + str(item) + ", " return output ## Instruction: Make get_readable_list process tuples, too ## Code After: def get_readable_list(passed_list, sep=', ', end=''): output = "" if isinstance(passed_list, list) or isinstance(passed_list, tuple): for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) elif isinstance(passed_list, dict): for i, item in enumerate(passed_list.values()): if len(passed_list) is 1: output += str(item) else: if i is not (len(passed_list) - 1): output += str(item) + sep else: output += str(item) return output + end def get_list_as_english(passed_list): output = "" for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) + ' ' elif len(passed_list) is 2: output += str(item) if i is not (len(passed_list) - 1): output += " and " else: output += "" else: if i is not (len(passed_list) - 1): output += str(item) + ", " else: output += "and " + str(item) + ", " return output
// ... existing code ... def get_readable_list(passed_list, sep=', ', end=''): output = "" if isinstance(passed_list, list) or isinstance(passed_list, tuple): for i, item in enumerate(passed_list): if len(passed_list) is 1: output += str(item) // ... modified code ... output += str(item) + sep else: output += str(item) elif isinstance(passed_list, dict): for i, item in enumerate(passed_list.values()): // ... rest of the code ...
7627b8759ab08df562048ec1fa94fe9d69d01374
setup.py
from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict==1.1') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, )
from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict>=1.1') try: import importlib except ImportError: required.append('importlib>=1.0.2') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, )
Add importlib if not included
Python
bsd-3-clause
tadpol/exoline,azdle/exoline,asolz/exoline,danslimmon/exoline
python
## Code Before: from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict==1.1') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, ) ## Instruction: Add importlib if not included ## Code After: from setuptools import setup from exoline import __version__ as version with open('requirements.txt') as f: required = f.read().splitlines() try: from collections import OrderedDict except ImportError: required.append('ordereddict>=1.1') try: import importlib except ImportError: required.append('importlib>=1.0.2') setup( name='exoline', version=version, url = 'http://github.com/dweaver/exoline', author = 'Dan Weaver', author_email = '[email protected]', description = 'Command line interface for Exosite platform.', long_description = open('README.md').read() + '\n\n' + open('HISTORY.md').read(), packages=['exoline'], package_dir={'exoline': 'exoline'}, scripts=['bin/exo', 'bin/exoline'], keywords=['exosite', 'onep', 'one platform', 'm2m'], install_requires=required, zip_safe=False, )
# ... existing code ... try: from collections import OrderedDict except ImportError: required.append('ordereddict>=1.1') try: import importlib except ImportError: required.append('importlib>=1.0.2') setup( name='exoline', # ... rest of the code ...
d9fc83ec526df1bf732d8f65f445f48f1b764dfe
selvbetjening/api/rest/models.py
from tastypie.authentication import Authentication from tastypie.resources import ModelResource from provider.oauth2.models import AccessToken from selvbetjening.core.members.models import SUser class OAuth2Authentication(Authentication): def is_authenticated(self, request, **kwargs): access_key = request.REQUEST.get('access_key', None) if not access_key: auth_header_value = request.META.get('HTTP_AUTHORIZATION', None) if auth_header_value: access_key = auth_header_value.split(' ')[1] if not access_key: return False try: token = AccessToken.objects.get_token(access_key) except AccessToken.DoesNotExist: return False request.user = token.user return True class AuthenticatedUserResource(ModelResource): class Meta: queryset = SUser.objects.all() resource_name = 'authenticated_user' allowed_methods = ['get'] excludes = ['password'] authentication = OAuth2Authentication() def get_object_list(self, request): return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1)
from tastypie.authentication import Authentication from tastypie.resources import ModelResource from provider.oauth2.models import AccessToken from selvbetjening.core.members.models import SUser class OAuth2Authentication(Authentication): def is_authenticated(self, request, **kwargs): access_key = request.REQUEST.get('access_key', None) if not access_key: auth_header_value = request.META.get('HTTP_AUTHORIZATION', None) if auth_header_value: access_key = auth_header_value.split(' ')[1] if not access_key: return False try: token = AccessToken.objects.get_token(access_key) except AccessToken.DoesNotExist: return False request.user = token.user return True class AuthenticatedUserResource(ModelResource): class Meta: queryset = SUser.objects.all() resource_name = 'authenticated_user' allowed_methods = ['get'] excludes = ['password'] authentication = OAuth2Authentication() def get_object_list(self, request): return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)
Fix mistake returning the wrong authenticated user
Python
mit
animekita/selvbetjening
python
## Code Before: from tastypie.authentication import Authentication from tastypie.resources import ModelResource from provider.oauth2.models import AccessToken from selvbetjening.core.members.models import SUser class OAuth2Authentication(Authentication): def is_authenticated(self, request, **kwargs): access_key = request.REQUEST.get('access_key', None) if not access_key: auth_header_value = request.META.get('HTTP_AUTHORIZATION', None) if auth_header_value: access_key = auth_header_value.split(' ')[1] if not access_key: return False try: token = AccessToken.objects.get_token(access_key) except AccessToken.DoesNotExist: return False request.user = token.user return True class AuthenticatedUserResource(ModelResource): class Meta: queryset = SUser.objects.all() resource_name = 'authenticated_user' allowed_methods = ['get'] excludes = ['password'] authentication = OAuth2Authentication() def get_object_list(self, request): return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=1) ## Instruction: Fix mistake returning the wrong authenticated user ## Code After: from tastypie.authentication import Authentication from tastypie.resources import ModelResource from provider.oauth2.models import AccessToken from selvbetjening.core.members.models import SUser class OAuth2Authentication(Authentication): def is_authenticated(self, request, **kwargs): access_key = request.REQUEST.get('access_key', None) if not access_key: auth_header_value = request.META.get('HTTP_AUTHORIZATION', None) if auth_header_value: access_key = auth_header_value.split(' ')[1] if not access_key: return False try: token = AccessToken.objects.get_token(access_key) except AccessToken.DoesNotExist: return False request.user = token.user return True class AuthenticatedUserResource(ModelResource): class Meta: queryset = SUser.objects.all() resource_name = 'authenticated_user' allowed_methods = ['get'] excludes = ['password'] authentication = OAuth2Authentication() def get_object_list(self, request): return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk)
# ... existing code ... authentication = OAuth2Authentication() def get_object_list(self, request): return super(AuthenticatedUserResource, self).get_object_list(request).filter(pk=request.user.pk) # ... rest of the code ...
ec439270a5460a6e1b649f7ceaca35b8d8827407
src/main/java/org/jusecase/properties/ui/KeyListCellRenderer.java
package org.jusecase.properties.ui; import org.jusecase.properties.entities.Key; import org.jusecase.properties.entities.KeyPopulation; import javax.swing.*; import java.awt.*; import java.util.HashMap; import java.util.Map; public class KeyListCellRenderer extends DefaultListCellRenderer { Map<KeyPopulation, Color> backgroundColorForPopulation = new HashMap<>(); public KeyListCellRenderer() { backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230)); backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230)); } @Override public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Key key = (Key) value; JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); if (!isSelected) { Color color = backgroundColorForPopulation.get(key.getPopulation()); if (color != null) { label.setBackground(color); } } return label; } }
package org.jusecase.properties.ui; import org.jusecase.properties.entities.Key; import org.jusecase.properties.entities.KeyPopulation; import javax.swing.*; import java.awt.*; import java.util.HashMap; import java.util.Map; public class KeyListCellRenderer extends DefaultListCellRenderer { Map<KeyPopulation, Color> backgroundColorForPopulation = new HashMap<>(); public KeyListCellRenderer() { backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186)); } @Override public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Key key = (Key) value; JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); if (!isSelected) { Color color = backgroundColorForPopulation.get(key.getPopulation()); if (color != null) { label.setBackground(color); } } return label; } }
Make sparse / complete more distinguishable
Java
apache-2.0
casid/jusecase-properties-editor
java
## Code Before: package org.jusecase.properties.ui; import org.jusecase.properties.entities.Key; import org.jusecase.properties.entities.KeyPopulation; import javax.swing.*; import java.awt.*; import java.util.HashMap; import java.util.Map; public class KeyListCellRenderer extends DefaultListCellRenderer { Map<KeyPopulation, Color> backgroundColorForPopulation = new HashMap<>(); public KeyListCellRenderer() { backgroundColorForPopulation.put(KeyPopulation.Complete, new Color(240, 255, 230)); backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(255, 251, 230)); } @Override public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Key key = (Key) value; JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); if (!isSelected) { Color color = backgroundColorForPopulation.get(key.getPopulation()); if (color != null) { label.setBackground(color); } } return label; } } ## Instruction: Make sparse / complete more distinguishable ## Code After: package org.jusecase.properties.ui; import org.jusecase.properties.entities.Key; import org.jusecase.properties.entities.KeyPopulation; import javax.swing.*; import java.awt.*; import java.util.HashMap; import java.util.Map; public class KeyListCellRenderer extends DefaultListCellRenderer { Map<KeyPopulation, Color> backgroundColorForPopulation = new HashMap<>(); public KeyListCellRenderer() { backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186)); } @Override public Component getListCellRendererComponent(JList<?> list, Object value, int index, boolean isSelected, boolean cellHasFocus) { Key key = (Key) value; JLabel label = (JLabel) super.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); if (!isSelected) { Color color = backgroundColorForPopulation.get(key.getPopulation()); if (color != null) { label.setBackground(color); } } return label; } }
# ... existing code ... Map<KeyPopulation, Color> backgroundColorForPopulation = new HashMap<>(); public KeyListCellRenderer() { backgroundColorForPopulation.put(KeyPopulation.Sparse, new Color(231, 211, 186)); } @Override # ... rest of the code ...
ee32b2e48acd47f1f1ff96482abf20f3d1818fc4
tests/__init__.py
import sys import unittest sys.path.append("../pythainlp") loader = unittest.TestLoader() testSuite = loader.discover("tests") testRunner = unittest.TextTestRunner(verbosity=1) testRunner.run(testSuite)
import sys import unittest import nltk sys.path.append("../pythainlp") nltk.download('omw-1.4') # load wordnet loader = unittest.TestLoader() testSuite = loader.discover("tests") testRunner = unittest.TextTestRunner(verbosity=1) testRunner.run(testSuite)
Add load wordnet to tests
Python
apache-2.0
PyThaiNLP/pythainlp
python
## Code Before: import sys import unittest sys.path.append("../pythainlp") loader = unittest.TestLoader() testSuite = loader.discover("tests") testRunner = unittest.TextTestRunner(verbosity=1) testRunner.run(testSuite) ## Instruction: Add load wordnet to tests ## Code After: import sys import unittest import nltk sys.path.append("../pythainlp") nltk.download('omw-1.4') # load wordnet loader = unittest.TestLoader() testSuite = loader.discover("tests") testRunner = unittest.TextTestRunner(verbosity=1) testRunner.run(testSuite)
# ... existing code ... import sys import unittest import nltk sys.path.append("../pythainlp") nltk.download('omw-1.4') # load wordnet loader = unittest.TestLoader() testSuite = loader.discover("tests") # ... rest of the code ...
21fdc8c2c20a75b5431a2fb549fd9632a27ab410
src/main/java/org/apacheextras/camel/examples/rcode/RCodeRunner.java
package org.apacheextras.camel.examples.rcode; import org.apache.camel.CamelContext; import org.apache.camel.impl.DefaultCamelContext; import java.io.Console; import java.io.File; import java.util.concurrent.TimeUnit; /** * @author Sebastian Rühl */ public class RCodeRunner { public static void main(String... args) throws Exception { CamelContext camelContext = new DefaultCamelContext(); File basePath = args.length > 0 ? new File(args[0]) : new File(System.getProperty("user.home") + "/.rcode-example"); camelContext.addRoutes(new RCodeRouteBuilder(basePath)); camelContext.start(); Console console = System.console(); if (console != null) { console.printf("Please press enter to shutdown route."); console.readLine(); } else { TimeUnit.SECONDS.sleep(5); } camelContext.stop(); } }
/* * Copyright 2013 Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apacheextras.camel.examples.rcode; import org.apache.camel.CamelContext; import org.apache.camel.impl.DefaultCamelContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; /** * * @author cemmersb */ public class RCodeRunner { private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class); private CamelContext camelContext; private RCodeRouteBuilder routeBuilder = null; public RCodeRunner() { try { initializeContext(); } catch (Exception ex) { LOGGER.error("Unable to initialize context: {}", ex.getMessage()); } } private void initializeContext() throws Exception { routeBuilder = new RCodeRouteBuilder(new File(System.getProperty("user.dir") + "./rcode-example/data")); camelContext = new DefaultCamelContext(); camelContext.addRoutes(routeBuilder); camelContext.start(); } @Override protected void finalize() throws Throwable { camelContext.stop(); super.finalize(); } public static void main(String... args) throws InterruptedException, Throwable { LOGGER.info("Starting RCodeRunner."); RCodeRunner rCodeRunner = new RCodeRunner(); Thread.sleep(1000); LOGGER.info("Stopping RCodeRunner."); rCodeRunner.finalize(); } }
Revert "migrate example to rcoderunner"
Revert "migrate example to rcoderunner" This reverts commit b03405d0726820a1f184e0e80892826d02994763.
Java
apache-2.0
sruehl/camel-example-rcode
java
## Code Before: package org.apacheextras.camel.examples.rcode; import org.apache.camel.CamelContext; import org.apache.camel.impl.DefaultCamelContext; import java.io.Console; import java.io.File; import java.util.concurrent.TimeUnit; /** * @author Sebastian Rühl */ public class RCodeRunner { public static void main(String... args) throws Exception { CamelContext camelContext = new DefaultCamelContext(); File basePath = args.length > 0 ? new File(args[0]) : new File(System.getProperty("user.home") + "/.rcode-example"); camelContext.addRoutes(new RCodeRouteBuilder(basePath)); camelContext.start(); Console console = System.console(); if (console != null) { console.printf("Please press enter to shutdown route."); console.readLine(); } else { TimeUnit.SECONDS.sleep(5); } camelContext.stop(); } } ## Instruction: Revert "migrate example to rcoderunner" This reverts commit b03405d0726820a1f184e0e80892826d02994763. ## Code After: /* * Copyright 2013 Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apacheextras.camel.examples.rcode; import org.apache.camel.CamelContext; import org.apache.camel.impl.DefaultCamelContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; /** * * @author cemmersb */ public class RCodeRunner { private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class); private CamelContext camelContext; private RCodeRouteBuilder routeBuilder = null; public RCodeRunner() { try { initializeContext(); } catch (Exception ex) { LOGGER.error("Unable to initialize context: {}", ex.getMessage()); } } private void initializeContext() throws Exception { routeBuilder = new RCodeRouteBuilder(new File(System.getProperty("user.dir") + "./rcode-example/data")); camelContext = new DefaultCamelContext(); camelContext.addRoutes(routeBuilder); camelContext.start(); } @Override protected void finalize() throws Throwable { camelContext.stop(); super.finalize(); } public static void main(String... args) throws InterruptedException, Throwable { LOGGER.info("Starting RCodeRunner."); RCodeRunner rCodeRunner = new RCodeRunner(); Thread.sleep(1000); LOGGER.info("Stopping RCodeRunner."); rCodeRunner.finalize(); } }
# ... existing code ... /* * Copyright 2013 Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apacheextras.camel.examples.rcode; import org.apache.camel.CamelContext; import org.apache.camel.impl.DefaultCamelContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; /** * * @author cemmersb */ public class RCodeRunner { private final static Logger LOGGER = LoggerFactory.getLogger(RCodeRunner.class); private CamelContext camelContext; private RCodeRouteBuilder routeBuilder = null; public RCodeRunner() { try { initializeContext(); } catch (Exception ex) { LOGGER.error("Unable to initialize context: {}", ex.getMessage()); } } private void initializeContext() throws Exception { routeBuilder = new RCodeRouteBuilder(new File(System.getProperty("user.dir") + "./rcode-example/data")); camelContext = new DefaultCamelContext(); camelContext.addRoutes(routeBuilder); camelContext.start(); } @Override protected void finalize() throws Throwable { camelContext.stop(); super.finalize(); } public static void main(String... args) throws InterruptedException, Throwable { LOGGER.info("Starting RCodeRunner."); RCodeRunner rCodeRunner = new RCodeRunner(); Thread.sleep(1000); LOGGER.info("Stopping RCodeRunner."); rCodeRunner.finalize(); } } # ... rest of the code ...
9ceace60593f133b4f6dfdbd9b6f583362415294
src/configuration.py
src/configuration.py
import ConfigParser import os def class ConfigDlstats(object): """Cross platform configuration file handler. This class manages dlstats configuration files, providing easy access to the options.""" def __init__(self) """Open the configuration files handler, choosing the right path depending on the platform.""" appname = 'dlstats' if os.name == 'posix': if os.path.isfile(os.environ["HOME"]+'/.'+appname): self.filename = os.environ["HOME"]+'/.'+appname elif os.path.isfile('/etc/'+appname): self.filename = '/etc/'+appname else: raise FileNotFoundError('No configuration file found.' elif os.name == 'mac': self.filename = ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname)) elif os.name == 'nt': self.filename = ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname)) else: raise UnsupportedOSError(os.name) self.config = ConfigParser.ConfigParser() self.config.read(self.filename)
import ConfigParser import os class ConfigDlstats(object): """Cross platform configuration file handler. This class manages dlstats configuration files, providing easy access to the options.""" def __init__(self): """Open the configuration files handler, choosing the right path depending on the platform.""" appname = 'dlstats' if os.name == 'posix': if os.path.isfile(os.environ["HOME"]+'/.'+appname): self.filename = os.environ["HOME"]+'/.'+appname elif os.path.isfile('/etc/'+appname): self.filename = '/etc/'+appname else: raise FileNotFoundError('No configuration file found.') elif os.name == 'mac': self.filename = ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname)) elif os.name == 'nt': self.filename = ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname)) else: raise UnsupportedOSError(os.name) self.config = ConfigParser.ConfigParser() self.config.read(self.filename)
Fix a few syntax errors
Fix a few syntax errors
Python
agpl-3.0
MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats
python
## Code Before: import ConfigParser import os def class ConfigDlstats(object): """Cross platform configuration file handler. This class manages dlstats configuration files, providing easy access to the options.""" def __init__(self) """Open the configuration files handler, choosing the right path depending on the platform.""" appname = 'dlstats' if os.name == 'posix': if os.path.isfile(os.environ["HOME"]+'/.'+appname): self.filename = os.environ["HOME"]+'/.'+appname elif os.path.isfile('/etc/'+appname): self.filename = '/etc/'+appname else: raise FileNotFoundError('No configuration file found.' elif os.name == 'mac': self.filename = ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname)) elif os.name == 'nt': self.filename = ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname)) else: raise UnsupportedOSError(os.name) self.config = ConfigParser.ConfigParser() self.config.read(self.filename) ## Instruction: Fix a few syntax errors ## Code After: import ConfigParser import os class ConfigDlstats(object): """Cross platform configuration file handler. This class manages dlstats configuration files, providing easy access to the options.""" def __init__(self): """Open the configuration files handler, choosing the right path depending on the platform.""" appname = 'dlstats' if os.name == 'posix': if os.path.isfile(os.environ["HOME"]+'/.'+appname): self.filename = os.environ["HOME"]+'/.'+appname elif os.path.isfile('/etc/'+appname): self.filename = '/etc/'+appname else: raise FileNotFoundError('No configuration file found.') elif os.name == 'mac': self.filename = ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname)) elif os.name == 'nt': self.filename = ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname)) else: raise UnsupportedOSError(os.name) self.config = ConfigParser.ConfigParser() self.config.read(self.filename)
... import ConfigParser import os class ConfigDlstats(object): """Cross platform configuration file handler. This class manages dlstats configuration files, providing easy access to the options.""" def __init__(self): """Open the configuration files handler, choosing the right path depending on the platform.""" appname = 'dlstats' if os.name == 'posix': if os.path.isfile(os.environ["HOME"]+'/.'+appname): ... elif os.path.isfile('/etc/'+appname): self.filename = '/etc/'+appname else: raise FileNotFoundError('No configuration file found.') elif os.name == 'mac': self.filename = ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname)) ...
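As an aside on the row above (which repairs a cross-platform config-file lookup), here is a minimal Python 3 sketch of the same lookup idea using `configparser` and `pathlib`. The application name and candidate paths are illustrative assumptions, not taken from the dlstats source.

```python
import configparser
import os
import sys
from pathlib import Path


def find_config(appname="dlstats"):
    """Return the first existing config file for the current platform, or None."""
    home = Path.home()
    if sys.platform.startswith("win"):
        # Assumption: per-user config lives under %APPDATA% on Windows.
        candidates = [Path(os.environ.get("APPDATA", home)) / appname]
    elif sys.platform == "darwin":
        candidates = [home / "Library" / "Application Support" / appname]
    else:  # generic POSIX
        candidates = [home / f".{appname}", Path("/etc") / appname]
    for path in candidates:
        if path.is_file():
            return path
    return None


config = configparser.ConfigParser()
path = find_config()
if path is not None:
    config.read(path)
```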
721f6f7916d698f22c9d96ce52cce3773fa514cc
uwsgiplugin.py
uwsgiplugin.py
import os import os.path import inspect base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0]) NAME = 'rust' GCC_LIST = ['rust', '%s/plugin.a' % base_path] CFLAGS = [] if os.uname()[0] == 'Darwin': CFLAGS.append('-mmacosx-version-min=10.7') if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0: os._exit(1)
import os import os.path import inspect base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0]) NAME = 'rust' GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path] CFLAGS = [] if os.uname()[0] == 'Darwin': CFLAGS.append('-mmacosx-version-min=10.7') if os.system("cargo build --release") != 0: os._exit(1) # To also build the example app: #os.system("cargo build --release --manifest-path examples/Cargo.toml")
Update script to build rust code via cargo
Update script to build rust code via cargo Signed-off-by: Luca Bruno <[email protected]>
Python
mit
unbit/uwsgi-rust,unbit/uwsgi-rust,unbit/uwsgi-rust
python
## Code Before: import os import os.path import inspect base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0]) NAME = 'rust' GCC_LIST = ['rust', '%s/plugin.a' % base_path] CFLAGS = [] if os.uname()[0] == 'Darwin': CFLAGS.append('-mmacosx-version-min=10.7') if os.system("rustc -o %s/plugin.a --crate-type staticlib %s/plugin.rs" % (base_path, base_path)) != 0: os._exit(1) ## Instruction: Update script to build rust code via cargo Signed-off-by: Luca Bruno <[email protected]> ## Code After: import os import os.path import inspect base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0]) NAME = 'rust' GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path] CFLAGS = [] if os.uname()[0] == 'Darwin': CFLAGS.append('-mmacosx-version-min=10.7') if os.system("cargo build --release") != 0: os._exit(1) # To also build the example app: #os.system("cargo build --release --manifest-path examples/Cargo.toml")
// ... existing code ... base_path = os.path.dirname(inspect.getframeinfo(inspect.currentframe())[0]) NAME = 'rust' GCC_LIST = ['rust', '%s/target/release/libuwsgi_rust.a' % base_path] CFLAGS = [] // ... modified code ... if os.uname()[0] == 'Darwin': CFLAGS.append('-mmacosx-version-min=10.7') if os.system("cargo build --release") != 0: os._exit(1) # To also build the example app: #os.system("cargo build --release --manifest-path examples/Cargo.toml") // ... rest of the code ...
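A hedged side note on the change above: shelling out with `os.system` discards error detail. The sketch below shows the same cargo invocation with `subprocess.run`, which raises on a non-zero exit; the manifest-directory argument is an assumption added for illustration.

```python
import subprocess
import sys


def build_rust_release(manifest_dir="."):
    """Run `cargo build --release` in manifest_dir and abort on failure."""
    try:
        subprocess.run(["cargo", "build", "--release"], cwd=manifest_dir, check=True)
    except FileNotFoundError:
        sys.exit("cargo is not on PATH")
    except subprocess.CalledProcessError as exc:
        sys.exit(f"cargo build failed with exit code {exc.returncode}")
```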
59b6d5b7e4f337320ea12d381e9cad0aa9c9fa75
tests/slice.c
tests/slice.c
int main( void ) { int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; printf( "Testing subset slice...\n" ); int const ws[] = { SLICE( xs, 3, 4 ) }; ASSERT( NELEM( ws ) == 4, ws[ 0 ] == xs[ 3 ], ws[ 1 ] == xs[ 4 ], ws[ 2 ] == xs[ 5 ], ws[ 3 ] == xs[ 6 ] ); printf( "Testing total slice...\n" ); int const ys[] = { SLICE( xs, 0, 6 ) }; ASSERT( NELEM( ys ) == 6, ys[ 0 ] == xs[ 0 ], ys[ 1 ] == xs[ 1 ], ys[ 2 ] == xs[ 2 ], ys[ 3 ] == xs[ 3 ], ys[ 4 ] == xs[ 4 ], ys[ 5 ] == xs[ 5 ] ); printf( "Testing empty slice...\n" ); int const zs[] = { 0, SLICE( xs, 2, 0 ) }; ASSERT( NELEM( zs ) == 1 ); printf( "SLICE() tests passed.\n" ); }
int main( void ) { int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; printf( "Testing subset slice...\n" ); int const ws[] = { SLICE( xs, 3, 4 ) }; ASSERT( NELEM( ws ) == 4, ws[ 0 ] == xs[ 3 ], ws[ 1 ] == xs[ 4 ], ws[ 2 ] == xs[ 5 ], ws[ 3 ] == xs[ 6 ] ); ( void ) ws; printf( "Testing total slice...\n" ); int const ys[] = { SLICE( xs, 0, 6 ) }; ASSERT( NELEM( ys ) == 6, ys[ 0 ] == xs[ 0 ], ys[ 1 ] == xs[ 1 ], ys[ 2 ] == xs[ 2 ], ys[ 3 ] == xs[ 3 ], ys[ 4 ] == xs[ 4 ], ys[ 5 ] == xs[ 5 ] ); ( void ) ys; printf( "Testing empty slice...\n" ); int const zs[] = { 0, SLICE( xs, 2, 0 ) }; ASSERT( NELEM( zs ) == 1 ); ( void ) zs; printf( "SLICE() tests passed.\n" ); }
Fix 'unused variable' warning on fast build
Fix 'unused variable' warning on fast build
C
agpl-3.0
mcinglis/libmacro,mcinglis/libmacro,mcinglis/libmacro
c
## Code Before: int main( void ) { int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; printf( "Testing subset slice...\n" ); int const ws[] = { SLICE( xs, 3, 4 ) }; ASSERT( NELEM( ws ) == 4, ws[ 0 ] == xs[ 3 ], ws[ 1 ] == xs[ 4 ], ws[ 2 ] == xs[ 5 ], ws[ 3 ] == xs[ 6 ] ); printf( "Testing total slice...\n" ); int const ys[] = { SLICE( xs, 0, 6 ) }; ASSERT( NELEM( ys ) == 6, ys[ 0 ] == xs[ 0 ], ys[ 1 ] == xs[ 1 ], ys[ 2 ] == xs[ 2 ], ys[ 3 ] == xs[ 3 ], ys[ 4 ] == xs[ 4 ], ys[ 5 ] == xs[ 5 ] ); printf( "Testing empty slice...\n" ); int const zs[] = { 0, SLICE( xs, 2, 0 ) }; ASSERT( NELEM( zs ) == 1 ); printf( "SLICE() tests passed.\n" ); } ## Instruction: Fix 'unused variable' warning on fast build ## Code After: int main( void ) { int const xs[] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }; printf( "Testing subset slice...\n" ); int const ws[] = { SLICE( xs, 3, 4 ) }; ASSERT( NELEM( ws ) == 4, ws[ 0 ] == xs[ 3 ], ws[ 1 ] == xs[ 4 ], ws[ 2 ] == xs[ 5 ], ws[ 3 ] == xs[ 6 ] ); ( void ) ws; printf( "Testing total slice...\n" ); int const ys[] = { SLICE( xs, 0, 6 ) }; ASSERT( NELEM( ys ) == 6, ys[ 0 ] == xs[ 0 ], ys[ 1 ] == xs[ 1 ], ys[ 2 ] == xs[ 2 ], ys[ 3 ] == xs[ 3 ], ys[ 4 ] == xs[ 4 ], ys[ 5 ] == xs[ 5 ] ); ( void ) ys; printf( "Testing empty slice...\n" ); int const zs[] = { 0, SLICE( xs, 2, 0 ) }; ASSERT( NELEM( zs ) == 1 ); ( void ) zs; printf( "SLICE() tests passed.\n" ); }
... ws[ 1 ] == xs[ 4 ], ws[ 2 ] == xs[ 5 ], ws[ 3 ] == xs[ 6 ] ); ( void ) ws; printf( "Testing total slice...\n" ); int const ys[] = { SLICE( xs, 0, 6 ) }; ... ys[ 3 ] == xs[ 3 ], ys[ 4 ] == xs[ 4 ], ys[ 5 ] == xs[ 5 ] ); ( void ) ys; printf( "Testing empty slice...\n" ); int const zs[] = { 0, SLICE( xs, 2, 0 ) }; ASSERT( NELEM( zs ) == 1 ); ( void ) zs; printf( "SLICE() tests passed.\n" ); } ...
9a2cc99b068b2aaa572f52b4516852b239577c34
dummyserver/server.py
dummyserver/server.py
import threading, socket """ Dummy server using for unit testing """ class Server(threading.Thread): def __init__(self, handler, host='localhost', port=8021): threading.Thread.__init__(self) self.handler = handler self.host = host self.port = port self.ready_event = threading.Event() self.stop_event = threading.Event() def run(self): sock = socket.socket() sock.bind((self.host, self.port)) sock.listen(0) self.ready_event.set() self.handler(sock) self.stop_event.set() sock.close() def __enter__(self): self.start() self.ready_event.wait() return self.host, self.port def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: self.stop_event.wait() return False # allow exceptions to propagate
import threading, socket class Server(threading.Thread): """ Dummy server using for unit testing """ def __init__(self, handler, host='localhost', port=8021): threading.Thread.__init__(self) self.handler = handler self.host = host self.port = port self.ready_event = threading.Event() self.stop_event = threading.Event() def run(self): sock = socket.socket() sock.bind((self.host, self.port)) sock.listen(0) self.ready_event.set() self.handler(sock) self.stop_event.set() sock.close() def __enter__(self): self.start() self.ready_event.wait() return self.host, self.port def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: self.stop_event.wait() return False # allow exceptions to propagate
Put docstring inside Server class
Put docstring inside Server class
Python
apache-2.0
psf/requests
python
## Code Before: import threading, socket """ Dummy server using for unit testing """ class Server(threading.Thread): def __init__(self, handler, host='localhost', port=8021): threading.Thread.__init__(self) self.handler = handler self.host = host self.port = port self.ready_event = threading.Event() self.stop_event = threading.Event() def run(self): sock = socket.socket() sock.bind((self.host, self.port)) sock.listen(0) self.ready_event.set() self.handler(sock) self.stop_event.set() sock.close() def __enter__(self): self.start() self.ready_event.wait() return self.host, self.port def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: self.stop_event.wait() return False # allow exceptions to propagate ## Instruction: Put docstring inside Server class ## Code After: import threading, socket class Server(threading.Thread): """ Dummy server using for unit testing """ def __init__(self, handler, host='localhost', port=8021): threading.Thread.__init__(self) self.handler = handler self.host = host self.port = port self.ready_event = threading.Event() self.stop_event = threading.Event() def run(self): sock = socket.socket() sock.bind((self.host, self.port)) sock.listen(0) self.ready_event.set() self.handler(sock) self.stop_event.set() sock.close() def __enter__(self): self.start() self.ready_event.wait() return self.host, self.port def __exit__(self, exc_type, exc_value, traceback): if exc_type is None: self.stop_event.wait() return False # allow exceptions to propagate
# ... existing code ... class Server(threading.Thread): """ Dummy server using for unit testing """ def __init__(self, handler, host='localhost', port=8021): threading.Thread.__init__(self) self.handler = handler # ... rest of the code ...
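Prompted by the record above, here is an illustrative variant of the same dummy-server idea, not the project's helper itself: it binds to port 0 so the OS picks a free port, which avoids collisions when tests run in parallel.

```python
import socket
import threading


class DummyServer(threading.Thread):
    """One-shot dummy server: the OS picks a free port, the handler serves it."""

    def __init__(self, handler):
        super().__init__(daemon=True)
        self.handler = handler
        self.sock = socket.socket()
        self.sock.bind(("localhost", 0))  # port 0: let the OS choose a free port
        self.sock.listen(1)

    def run(self):
        self.handler(self.sock)
        self.sock.close()

    def __enter__(self):
        self.start()
        return self.sock.getsockname()  # (host, port)

    def __exit__(self, exc_type, exc_value, traceback):
        self.join(timeout=5)
        return False


def echo_once(listener):
    conn, _addr = listener.accept()
    conn.sendall(conn.recv(1024))
    conn.close()


with DummyServer(echo_once) as (host, port):
    client = socket.create_connection((host, port))
    client.sendall(b"ping")
    assert client.recv(1024) == b"ping"
    client.close()
```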
44d103359cff312865f409ff34f528f63e441ef4
graphapi/views.py
graphapi/views.py
from simplekeys.verifier import verify_request from graphene_django.views import GraphQLView from django.conf import settings class KeyedGraphQLView(GraphQLView): graphiql_template = "graphene/graphiql-keyed.html" def get_response(self, request, data, show_graphiql=False): # check key only if we're not handling a graphiql request # if not show_graphiql: # error = verify_request(request, 'graphapi') # if error: # print('graphapi/views: get_response bailed ') # return error, error.status_code return super().get_response(request, data, show_graphiql) def render_graphiql(self, request, **data): data['demo_key'] = settings.GRAPHQL_DEMO_KEY return super().render_graphiql(request, **data)
from simplekeys.verifier import verify_request from graphene_django.views import GraphQLView from django.conf import settings class KeyedGraphQLView(GraphQLView): graphiql_template = "graphene/graphiql-keyed.html" def get_response(self, request, data, show_graphiql=False): # check key only if we're not handling a graphiql request if not show_graphiql: error = verify_request(request, 'graphapi') if error: return error, error.status_code return super().get_response(request, data, show_graphiql) def render_graphiql(self, request, **data): data['demo_key'] = settings.GRAPHQL_DEMO_KEY return super().render_graphiql(request, **data)
Revert "Reimplement using explicit variable lookup"
Revert "Reimplement using explicit variable lookup" This reverts commit 94683e6c
Python
mit
openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org
python
## Code Before: from simplekeys.verifier import verify_request from graphene_django.views import GraphQLView from django.conf import settings class KeyedGraphQLView(GraphQLView): graphiql_template = "graphene/graphiql-keyed.html" def get_response(self, request, data, show_graphiql=False): # check key only if we're not handling a graphiql request # if not show_graphiql: # error = verify_request(request, 'graphapi') # if error: # print('graphapi/views: get_response bailed ') # return error, error.status_code return super().get_response(request, data, show_graphiql) def render_graphiql(self, request, **data): data['demo_key'] = settings.GRAPHQL_DEMO_KEY return super().render_graphiql(request, **data) ## Instruction: Revert "Reimplement using explicit variable lookup" This reverts commit 94683e6c ## Code After: from simplekeys.verifier import verify_request from graphene_django.views import GraphQLView from django.conf import settings class KeyedGraphQLView(GraphQLView): graphiql_template = "graphene/graphiql-keyed.html" def get_response(self, request, data, show_graphiql=False): # check key only if we're not handling a graphiql request if not show_graphiql: error = verify_request(request, 'graphapi') if error: return error, error.status_code return super().get_response(request, data, show_graphiql) def render_graphiql(self, request, **data): data['demo_key'] = settings.GRAPHQL_DEMO_KEY return super().render_graphiql(request, **data)
// ... existing code ... def get_response(self, request, data, show_graphiql=False): # check key only if we're not handling a graphiql request if not show_graphiql: error = verify_request(request, 'graphapi') if error: return error, error.status_code return super().get_response(request, data, show_graphiql) // ... rest of the code ...
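As a framework-free illustration of the gating pattern restored above (check an API key first, short-circuit with an error otherwise), here is a small sketch. The request shape, `VALID_KEYS`, and `verify_key` are hypothetical stand-ins, not the simplekeys API.

```python
VALID_KEYS = {"demo-key"}  # hypothetical key store


def verify_key(request):
    """Return None when the key is valid, otherwise an error payload."""
    if request.get("api_key") in VALID_KEYS:
        return None
    return {"error": "invalid or missing API key", "status": 403}


def keyed(handler):
    """Decorator: run the key check before delegating to the real handler."""
    def wrapper(request, *args, **kwargs):
        error = verify_key(request)
        if error is not None:
            return error
        return handler(request, *args, **kwargs)
    return wrapper


@keyed
def graphql_view(request):
    return {"data": {"ok": True}, "status": 200}


print(graphql_view({"api_key": "demo-key"}))  # passes the check
print(graphql_view({}))                       # short-circuits with 403
```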
8e622ec1c83b064ed3d47d41dc49b179dc9a9f54
app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java
app/src/main/java/coderefactory/net/popmovies/MovieAdapter.java
package coderefactory.net.popmovies; import android.app.Activity; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.TextView; import java.util.List; public class MovieAdapter extends ArrayAdapter<Movie> { public MovieAdapter(final Activity context, final List<Movie> movies) { super(context, 0, movies); } @NonNull @Override public View getView(final int position, final View convertView, final ViewGroup parent) { final View rootView; if (convertView == null) { rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false); } else { rootView = convertView; } final Movie movie = getItem(position); final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title); titleView.setText(movie.getTitle()); final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released); releaseView.setText(String.valueOf(movie.getReleased())); return rootView; } }
package coderefactory.net.popmovies; import android.app.Activity; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.TextView; import java.util.List; public class MovieAdapter extends ArrayAdapter<Movie> { private ViewHolder viewHolder; public MovieAdapter(final Activity context, final List<Movie> movies) { super(context, 0, movies); } @NonNull @Override public View getView(final int position, final View convertView, final ViewGroup parent) { final View itemView; if (convertView == null) { itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false); viewHolder = new ViewHolder(itemView); itemView.setTag(viewHolder); } else { itemView = convertView; viewHolder = (ViewHolder) convertView.getTag(); } populateView(position); return itemView; } private void populateView(final int position) { final Movie movie = getItem(position); viewHolder.titleView.setText(movie.getTitle()); viewHolder.releaseView.setText(String.valueOf(movie.getReleased())); } private static class ViewHolder { private final TextView titleView; private final TextView releaseView; private ViewHolder(final View itemView) { titleView = (TextView) itemView.findViewById(R.id.movie_title); releaseView = (TextView) itemView.findViewById(R.id.movie_released); } } }
Introduce ViewHolder pattern into ArrayAdapter
Introduce ViewHolder pattern into ArrayAdapter
Java
mit
jarst/PopMovies
java
## Code Before: package coderefactory.net.popmovies; import android.app.Activity; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.TextView; import java.util.List; public class MovieAdapter extends ArrayAdapter<Movie> { public MovieAdapter(final Activity context, final List<Movie> movies) { super(context, 0, movies); } @NonNull @Override public View getView(final int position, final View convertView, final ViewGroup parent) { final View rootView; if (convertView == null) { rootView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false); } else { rootView = convertView; } final Movie movie = getItem(position); final TextView titleView = (TextView) rootView.findViewById(R.id.movie_title); titleView.setText(movie.getTitle()); final TextView releaseView = (TextView) rootView.findViewById(R.id.movie_released); releaseView.setText(String.valueOf(movie.getReleased())); return rootView; } } ## Instruction: Introduce ViewHolder pattern into ArrayAdapter ## Code After: package coderefactory.net.popmovies; import android.app.Activity; import android.support.annotation.NonNull; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.TextView; import java.util.List; public class MovieAdapter extends ArrayAdapter<Movie> { private ViewHolder viewHolder; public MovieAdapter(final Activity context, final List<Movie> movies) { super(context, 0, movies); } @NonNull @Override public View getView(final int position, final View convertView, final ViewGroup parent) { final View itemView; if (convertView == null) { itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false); viewHolder = new ViewHolder(itemView); itemView.setTag(viewHolder); } else { itemView = convertView; viewHolder = (ViewHolder) convertView.getTag(); } populateView(position); return itemView; } private void populateView(final int position) { final Movie movie = getItem(position); viewHolder.titleView.setText(movie.getTitle()); viewHolder.releaseView.setText(String.valueOf(movie.getReleased())); } private static class ViewHolder { private final TextView titleView; private final TextView releaseView; private ViewHolder(final View itemView) { titleView = (TextView) itemView.findViewById(R.id.movie_title); releaseView = (TextView) itemView.findViewById(R.id.movie_released); } } }
... public class MovieAdapter extends ArrayAdapter<Movie> { private ViewHolder viewHolder; public MovieAdapter(final Activity context, final List<Movie> movies) { super(context, 0, movies); } ... @NonNull @Override public View getView(final int position, final View convertView, final ViewGroup parent) { final View itemView; if (convertView == null) { itemView = LayoutInflater.from(getContext()).inflate(R.layout.list_item_movie, parent, false); viewHolder = new ViewHolder(itemView); itemView.setTag(viewHolder); } else { itemView = convertView; viewHolder = (ViewHolder) convertView.getTag(); } populateView(position); return itemView; } private void populateView(final int position) { final Movie movie = getItem(position); viewHolder.titleView.setText(movie.getTitle()); viewHolder.releaseView.setText(String.valueOf(movie.getReleased())); } private static class ViewHolder { private final TextView titleView; private final TextView releaseView; private ViewHolder(final View itemView) { titleView = (TextView) itemView.findViewById(R.id.movie_title); releaseView = (TextView) itemView.findViewById(R.id.movie_released); } } } ...
0261c895cb41f5caba42ae432b997fd3c941e96f
tests.py
tests.py
import pytest import cleaner class TestTagRemoval(): def test_span_removal(self): text = ('<span style="font-family: &quot;helvetica neue&quot; ,' '&quot;arial&quot; , &quot;helvetica&quot; , sans-serif;">This is some' ' dummy text lalalala</span> This is some more dummy text ' '<span>test</span>') expected = ('This is some dummy text lalalala This is some more dummy ' 'text test') cleaned = cleaner.remove_superflous_markup(text) assert cleaned == expected
import pytest import cleaner class TestTagTools(): def test_get_pure_tag(self): tag1 = '<div>' tag2 = '</div>' tag3 = '<pre class="prettyprint">' assert cleaner.get_pure_tag(tag1) == '<div>' assert cleaner.get_pure_tag(tag2) == '</div>' assert cleaner.get_pure_tag(tag3) == '<pre>'
Add test for getting pure html tag
Add test for getting pure html tag
Python
mit
jamalmoir/blogger_html_cleaner
python
## Code Before: import pytest import cleaner class TestTagRemoval(): def test_span_removal(self): text = ('<span style="font-family: &quot;helvetica neue&quot; ,' '&quot;arial&quot; , &quot;helvetica&quot; , sans-serif;">This is some' ' dummy text lalalala</span> This is some more dummy text ' '<span>test</span>') expected = ('This is some dummy text lalalala This is some more dummy ' 'text test') cleaned = cleaner.remove_superflous_markup(text) assert cleaned == expected ## Instruction: Add test for getting pure html tag ## Code After: import pytest import cleaner class TestTagTools(): def test_get_pure_tag(self): tag1 = '<div>' tag2 = '</div>' tag3 = '<pre class="prettyprint">' assert cleaner.get_pure_tag(tag1) == '<div>' assert cleaner.get_pure_tag(tag2) == '</div>' assert cleaner.get_pure_tag(tag3) == '<pre>'
... import pytest import cleaner class TestTagTools(): def test_get_pure_tag(self): tag1 = '<div>' tag2 = '</div>' tag3 = '<pre class="prettyprint">' assert cleaner.get_pure_tag(tag1) == '<div>' assert cleaner.get_pure_tag(tag2) == '</div>' assert cleaner.get_pure_tag(tag3) == '<pre>' ...
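The row above adds tests for a `get_pure_tag` helper without showing its body. Here is a minimal sketch of an implementation that would satisfy those asserts, assuming the helper only needs to strip attributes from a single opening or closing tag.

```python
import re


def get_pure_tag(tag):
    """Strip attributes from an HTML tag, keeping only '<name>' or '</name>'."""
    match = re.match(r'<\s*(/?)\s*([a-zA-Z][a-zA-Z0-9]*)', tag)
    if match is None:
        return tag
    closing, name = match.groups()
    return f'<{closing}{name}>'


assert get_pure_tag('<div>') == '<div>'
assert get_pure_tag('</div>') == '</div>'
assert get_pure_tag('<pre class="prettyprint">') == '<pre>'
```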
fba4fdf426b0a29ca06deb67587c2bd804adb017
tbgxmlutils/xmlutils.py
tbgxmlutils/xmlutils.py
from xml.dom import minidom import xml.etree.ElementTree as ET import xmltodict def add(k, parent=None, txt=None, attrs=None): if parent is None: handle = ET.Element(k) else: handle = ET.SubElement(parent, k) if txt: handle.text = unicode(txt) try: for k, v in attrs.iteritems(): handle.attrib[k] = v except AttributeError: pass return handle def etree2xml(e, encoding='UTF-8'): return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e) def pretty(xml=None, fn=None): if fn is not None: xml = minidom.parse(fn) elif not isinstance(xml, minidom.Document): xml = minidom.parseString(xml) return xml.toprettyxml(indent=' ') def xml_fn_to_json(fn): fh = open(fn, 'r') json = xmltodict.parse(fh.read()) return json
from xml.dom import minidom import lxml.etree as ET import xmltodict def add(k, parent=None, txt=None, attrs=None): if parent is None: handle = ET.Element(k) else: handle = ET.SubElement(parent, k) if txt: handle.text = unicode(txt) try: for k, v in attrs.iteritems(): handle.attrib[k] = v except AttributeError: pass return handle def etree2xml(e, encoding='UTF-8'): return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e) def pretty(xml=None, fn=None): if fn is not None: xml = minidom.parse(fn) elif not isinstance(xml, minidom.Document): xml = minidom.parseString(xml) return xml.toprettyxml(indent=' ') def xml_fn_to_json(fn): fh = open(fn, 'r') json = xmltodict.parse(fh.read()) return json
Use lxml instead of elementtree.
Use lxml instead of elementtree.
Python
mit
Schwarzschild/TBGXMLUtils
python
## Code Before: from xml.dom import minidom import xml.etree.ElementTree as ET import xmltodict def add(k, parent=None, txt=None, attrs=None): if parent is None: handle = ET.Element(k) else: handle = ET.SubElement(parent, k) if txt: handle.text = unicode(txt) try: for k, v in attrs.iteritems(): handle.attrib[k] = v except AttributeError: pass return handle def etree2xml(e, encoding='UTF-8'): return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e) def pretty(xml=None, fn=None): if fn is not None: xml = minidom.parse(fn) elif not isinstance(xml, minidom.Document): xml = minidom.parseString(xml) return xml.toprettyxml(indent=' ') def xml_fn_to_json(fn): fh = open(fn, 'r') json = xmltodict.parse(fh.read()) return json ## Instruction: Use lxml instead of elementtree. ## Code After: from xml.dom import minidom import lxml.etree as ET import xmltodict def add(k, parent=None, txt=None, attrs=None): if parent is None: handle = ET.Element(k) else: handle = ET.SubElement(parent, k) if txt: handle.text = unicode(txt) try: for k, v in attrs.iteritems(): handle.attrib[k] = v except AttributeError: pass return handle def etree2xml(e, encoding='UTF-8'): return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e) def pretty(xml=None, fn=None): if fn is not None: xml = minidom.parse(fn) elif not isinstance(xml, minidom.Document): xml = minidom.parseString(xml) return xml.toprettyxml(indent=' ') def xml_fn_to_json(fn): fh = open(fn, 'r') json = xmltodict.parse(fh.read()) return json
// ... existing code ... from xml.dom import minidom import lxml.etree as ET import xmltodict def add(k, parent=None, txt=None, attrs=None): // ... rest of the code ...
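A short aside on the lxml switch above: one practical payoff is that lxml can pretty-print on its own, so the minidom round-trip in `pretty()` becomes optional. A minimal sketch (requires the lxml package):

```python
from lxml import etree

root = etree.Element("catalog")
item = etree.SubElement(root, "item", id="1")
item.text = "example"

# lxml serializes with indentation directly; no minidom round-trip needed.
print(etree.tostring(root, pretty_print=True, encoding="unicode"))
```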
6b15d7151f2703ce049ac9ab14b13c8b13122bf8
http.h
http.h
typedef struct { int method; /* request method */ char* path; /* requested path */ char* host; /* hostname field */ char* type; /* content-type */ size_t length; /* content-length */ int flags; } http_request; /* Write an error page (and header). Returns number of bytes written. */ size_t gen_error_page( int fd, int error ); /* Write 200 Ok header with content length and content type. Returns the number of bytes written, 0 on failure. */ size_t http_ok( int fd, const char* type, unsigned long size ); /* parse a HTTP request, returns non-zero on success, zero on failure */ int http_request_parse( char* buffer, http_request* request ); #endif /* HTTP_H */
typedef struct { int method; /* request method */ char* path; /* requested path */ char* host; /* hostname field */ char* type; /* content-type */ size_t length; /* content-length */ } http_request; /* Write an error page (and header). Returns number of bytes written. */ size_t gen_error_page( int fd, int error ); /* Write 200 Ok header with content length and content type. Returns the number of bytes written, 0 on failure. */ size_t http_ok( int fd, const char* type, unsigned long size ); /* parse a HTTP request, returns non-zero on success, zero on failure */ int http_request_parse( char* buffer, http_request* request ); #endif /* HTTP_H */
Remove unused flags field from request
Remove unused flags field from request Signed-off-by: David Oberhollenzer <[email protected]>
C
agpl-3.0
AgentD/websrv,AgentD/websrv,AgentD/websrv
c
## Code Before: typedef struct { int method; /* request method */ char* path; /* requested path */ char* host; /* hostname field */ char* type; /* content-type */ size_t length; /* content-length */ int flags; } http_request; /* Write an error page (and header). Returns number of bytes written. */ size_t gen_error_page( int fd, int error ); /* Write 200 Ok header with content length and content type. Returns the number of bytes written, 0 on failure. */ size_t http_ok( int fd, const char* type, unsigned long size ); /* parse a HTTP request, returns non-zero on success, zero on failure */ int http_request_parse( char* buffer, http_request* request ); #endif /* HTTP_H */ ## Instruction: Remove unused flags field from request Signed-off-by: David Oberhollenzer <[email protected]> ## Code After: typedef struct { int method; /* request method */ char* path; /* requested path */ char* host; /* hostname field */ char* type; /* content-type */ size_t length; /* content-length */ } http_request; /* Write an error page (and header). Returns number of bytes written. */ size_t gen_error_page( int fd, int error ); /* Write 200 Ok header with content length and content type. Returns the number of bytes written, 0 on failure. */ size_t http_ok( int fd, const char* type, unsigned long size ); /* parse a HTTP request, returns non-zero on success, zero on failure */ int http_request_parse( char* buffer, http_request* request ); #endif /* HTTP_H */
# ... existing code ... char* host; /* hostname field */ char* type; /* content-type */ size_t length; /* content-length */ } http_request; # ... rest of the code ...
ad0151eee0027237c8cdd433ef2f24bfa47af5df
pyreaclib/nucdata/tests/test_binding.py
pyreaclib/nucdata/tests/test_binding.py
import os from pyreaclib.nucdata import BindingTable class TestAME(object): @classmethod def setup_class(cls): """ this is run once for each class before any tests """ pass @classmethod def teardown_class(cls): """ this is run once for each class after all tests """ pass def setup_method(self): """ this is run before each test """ self.bintable = BindingTable() def teardown_method(self): """ this is run after each test """ self.bintable = None def test_get(self): nuc = self.bintable.get_nuclide(n=1, z=1) assert nuc.z == 1 assert nuc.n == 1 assert nuc.nucbind == 1.112283
import os from pyreaclib.nucdata import BindingTable class TestAME(object): @classmethod def setup_class(cls): """ this is run once for each class before any tests """ pass @classmethod def teardown_class(cls): """ this is run once for each class after all tests """ pass def setup_method(self): """ this is run before each test """ self.bintable = BindingTable() def teardown_method(self): """ this is run after each test """ self.bintable = None def test_get(self): nuc = self.bintable.get_nuclide(n=1, z=1) assert nuc.z == 1 assert nuc.n == 1 assert nuc.nucbind == 1.112283 nuc = self.bintable.get_nuclide(n=5, z=6) assert nuc.z == 6 assert nuc.n == 5 assert nuc.nucbind == 6.676456 nuc = self.bintable.get_nuclide(n=17, z=23) assert nuc.z == 23 assert nuc.n == 17 assert nuc.nucbind == 7.317 nuc = self.bintable.get_nuclide(n=90, z=78) assert nuc.z == 78 assert nuc.n == 90 assert nuc.nucbind == 7.773605
Add some more binding energy table tests.
Add some more binding energy table tests.
Python
bsd-3-clause
pyreaclib/pyreaclib
python
## Code Before: import os from pyreaclib.nucdata import BindingTable class TestAME(object): @classmethod def setup_class(cls): """ this is run once for each class before any tests """ pass @classmethod def teardown_class(cls): """ this is run once for each class after all tests """ pass def setup_method(self): """ this is run before each test """ self.bintable = BindingTable() def teardown_method(self): """ this is run after each test """ self.bintable = None def test_get(self): nuc = self.bintable.get_nuclide(n=1, z=1) assert nuc.z == 1 assert nuc.n == 1 assert nuc.nucbind == 1.112283 ## Instruction: Add some more binding energy table tests. ## Code After: import os from pyreaclib.nucdata import BindingTable class TestAME(object): @classmethod def setup_class(cls): """ this is run once for each class before any tests """ pass @classmethod def teardown_class(cls): """ this is run once for each class after all tests """ pass def setup_method(self): """ this is run before each test """ self.bintable = BindingTable() def teardown_method(self): """ this is run after each test """ self.bintable = None def test_get(self): nuc = self.bintable.get_nuclide(n=1, z=1) assert nuc.z == 1 assert nuc.n == 1 assert nuc.nucbind == 1.112283 nuc = self.bintable.get_nuclide(n=5, z=6) assert nuc.z == 6 assert nuc.n == 5 assert nuc.nucbind == 6.676456 nuc = self.bintable.get_nuclide(n=17, z=23) assert nuc.z == 23 assert nuc.n == 17 assert nuc.nucbind == 7.317 nuc = self.bintable.get_nuclide(n=90, z=78) assert nuc.z == 78 assert nuc.n == 90 assert nuc.nucbind == 7.773605
... assert nuc.n == 1 assert nuc.nucbind == 1.112283 nuc = self.bintable.get_nuclide(n=5, z=6) assert nuc.z == 6 assert nuc.n == 5 assert nuc.nucbind == 6.676456 nuc = self.bintable.get_nuclide(n=17, z=23) assert nuc.z == 23 assert nuc.n == 17 assert nuc.nucbind == 7.317 nuc = self.bintable.get_nuclide(n=90, z=78) assert nuc.z == 78 assert nuc.n == 90 assert nuc.nucbind == 7.773605 ...
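A note on the added assertions above: the same coverage can be expressed as a parametrized test, which reports each (n, z) case independently. A sketch using pytest; the expected values are copied from the record.

```python
import pytest
from pyreaclib.nucdata import BindingTable

CASES = [
    (1, 1, 1.112283),
    (5, 6, 6.676456),
    (17, 23, 7.317),
    (90, 78, 7.773605),
]


@pytest.mark.parametrize("n, z, expected", CASES)
def test_get_nuclide_parametrized(n, z, expected):
    nuc = BindingTable().get_nuclide(n=n, z=z)
    assert (nuc.n, nuc.z, nuc.nucbind) == (n, z, expected)
```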
d6759d0abec637753d93cd407fad5e7abc6ec86d
astropy/tests/plugins/display.py
astropy/tests/plugins/display.py
import warnings from astropy.utils.exceptions import AstropyDeprecationWarning try: from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS except ImportError: PYTEST_HEADER_MODULES = {} TESTED_VERSIONS = {} warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. ' 'See the pytest-astropy documentation for information on ' 'migrating to using pytest-astropy to customize the pytest ' 'header.', AstropyDeprecationWarning)
import warnings from astropy.utils.exceptions import AstropyDeprecationWarning try: from pytest_astropy_header.display import (PYTEST_HEADER_MODULES, TESTED_VERSIONS) except ImportError: PYTEST_HEADER_MODULES = {} TESTED_VERSIONS = {} warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. ' 'See the pytest-astropy-header documentation for information on ' 'migrating to using pytest-astropy-header to customize the ' 'pytest header.', AstropyDeprecationWarning)
Fix typo in deprecation warning
TST: Fix typo in deprecation warning [ci skip]
Python
bsd-3-clause
stargaser/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,lpsinger/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,larrybradley/astropy,lpsinger/astropy,MSeifert04/astropy,astropy/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy,larrybradley/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,mhvk/astropy,StuartLittlefair/astropy,saimn/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,mhvk/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,dhomeier/astropy,stargaser/astropy,pllim/astropy,dhomeier/astropy,saimn/astropy,saimn/astropy,pllim/astropy,larrybradley/astropy,pllim/astropy,pllim/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,stargaser/astropy,MSeifert04/astropy,stargaser/astropy
python
## Code Before: import warnings from astropy.utils.exceptions import AstropyDeprecationWarning try: from pytest_astropy_header.display import PYTEST_HEADER_MODULES, TESTED_VERSIONS except ImportError: PYTEST_HEADER_MODULES = {} TESTED_VERSIONS = {} warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. ' 'See the pytest-astropy documentation for information on ' 'migrating to using pytest-astropy to customize the pytest ' 'header.', AstropyDeprecationWarning) ## Instruction: TST: Fix typo in deprecation warning [ci skip] ## Code After: import warnings from astropy.utils.exceptions import AstropyDeprecationWarning try: from pytest_astropy_header.display import (PYTEST_HEADER_MODULES, TESTED_VERSIONS) except ImportError: PYTEST_HEADER_MODULES = {} TESTED_VERSIONS = {} warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. ' 'See the pytest-astropy-header documentation for information on ' 'migrating to using pytest-astropy-header to customize the ' 'pytest header.', AstropyDeprecationWarning)
... from astropy.utils.exceptions import AstropyDeprecationWarning try: from pytest_astropy_header.display import (PYTEST_HEADER_MODULES, TESTED_VERSIONS) except ImportError: PYTEST_HEADER_MODULES = {} TESTED_VERSIONS = {} warnings.warn('The astropy.tests.plugins.display plugin has been deprecated. ' 'See the pytest-astropy-header documentation for information on ' 'migrating to using pytest-astropy-header to customize the ' 'pytest header.', AstropyDeprecationWarning) ...
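The row above is an instance of a common shim pattern: try to import from the new home, fall back to harmless defaults, and warn the caller to migrate. A generic, standard-library-only sketch follows; `new_home` is a made-up package name used purely to demonstrate the fallback path.

```python
import warnings

try:
    # Preferred location (hypothetical package, so this import fails here).
    from new_home.display import HEADER_MODULES, TESTED_VERSIONS
except ImportError:
    HEADER_MODULES = {}
    TESTED_VERSIONS = {}

warnings.warn(
    "this shim module is deprecated; import from new_home.display instead.",
    DeprecationWarning,
    stacklevel=2,
)
```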
ab9a38793645a9c61cf1c320e5a4db9bf7b03ccf
grow/deployments/utils.py
grow/deployments/utils.py
from .indexes import messages import git class Error(Exception): pass class NoGitHeadError(Error, ValueError): pass def create_commit_message(repo): message = messages.CommitMessage() try: commit = repo.head.commit except ValueError: raise NoGitHeadError('On initial commit, no HEAD yet.') try: repo.git.diff('--quiet') has_unstaged_changes = False except git.exc.GitCommandError: has_unstaged_changes = True message.has_unstaged_changes = has_unstaged_changes message.sha = commit.hexsha message.message = commit.message message.branch = repo.head.ref.name message.author = messages.AuthorMessage( name=commit.author.name, email=commit.author.email) return message
from .indexes import messages import git class Error(Exception): pass class NoGitHeadError(Error, ValueError): pass def create_commit_message(repo): message = messages.CommitMessage() try: commit = repo.head.commit except ValueError: raise NoGitHeadError('On initial commit, no HEAD yet.') try: repo.git.diff('--quiet') has_unstaged_changes = False except git.exc.GitCommandError: has_unstaged_changes = True message.has_unstaged_changes = has_unstaged_changes message.sha = commit.hexsha message.message = commit.message try: message.branch = repo.head.ref.name except TypeError: # Allow operating in an environment with a detached HEAD. pass message.author = messages.AuthorMessage( name=commit.author.name, email=commit.author.email) return message
Allow operating in an environment with a detached HEAD.
Allow operating in an environment with a detached HEAD.
Python
mit
grow/pygrow,denmojo/pygrow,grow/grow,grow/grow,grow/pygrow,codedcolors/pygrow,grow/grow,grow/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,codedcolors/pygrow,codedcolors/pygrow,grow/grow
python
## Code Before: from .indexes import messages import git class Error(Exception): pass class NoGitHeadError(Error, ValueError): pass def create_commit_message(repo): message = messages.CommitMessage() try: commit = repo.head.commit except ValueError: raise NoGitHeadError('On initial commit, no HEAD yet.') try: repo.git.diff('--quiet') has_unstaged_changes = False except git.exc.GitCommandError: has_unstaged_changes = True message.has_unstaged_changes = has_unstaged_changes message.sha = commit.hexsha message.message = commit.message message.branch = repo.head.ref.name message.author = messages.AuthorMessage( name=commit.author.name, email=commit.author.email) return message ## Instruction: Allow operating in an environment with a detached HEAD. ## Code After: from .indexes import messages import git class Error(Exception): pass class NoGitHeadError(Error, ValueError): pass def create_commit_message(repo): message = messages.CommitMessage() try: commit = repo.head.commit except ValueError: raise NoGitHeadError('On initial commit, no HEAD yet.') try: repo.git.diff('--quiet') has_unstaged_changes = False except git.exc.GitCommandError: has_unstaged_changes = True message.has_unstaged_changes = has_unstaged_changes message.sha = commit.hexsha message.message = commit.message try: message.branch = repo.head.ref.name except TypeError: # Allow operating in an environment with a detached HEAD. pass message.author = messages.AuthorMessage( name=commit.author.name, email=commit.author.email) return message
// ... existing code ... message.has_unstaged_changes = has_unstaged_changes message.sha = commit.hexsha message.message = commit.message try: message.branch = repo.head.ref.name except TypeError: # Allow operating in an environment with a detached HEAD. pass message.author = messages.AuthorMessage( name=commit.author.name, email=commit.author.email) return message // ... rest of the code ...
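A small companion sketch to the change above, showing the same detached-HEAD tolerance with GitPython's explicit `is_detached` check rather than catching `TypeError`; it assumes the script runs somewhere inside a git working tree.

```python
import git

repo = git.Repo(".", search_parent_directories=True)
commit = repo.head.commit

info = {
    "sha": commit.hexsha,
    "message": commit.message,
    "author": f"{commit.author.name} <{commit.author.email}>",
}
# On a detached HEAD there is no active branch to report.
if not repo.head.is_detached:
    info["branch"] = repo.active_branch.name

print(info)
```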
79c0071b7aad2992011684428611701bc58a9bff
tests/__init__.py
tests/__init__.py
try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing import tornado.options import celery from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=tornado.options.options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs)
try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing from tornado.options import options import celery import mock from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) def mock_option(self, name, value): return mock.patch.object(options.mockable(), name, value)
Add a util function for mocking options
Add a util function for mocking options
Python
bsd-3-clause
jzhou77/flower,asmodehn/flower,jzhou77/flower,asmodehn/flower,asmodehn/flower,jzhou77/flower
python
## Code Before: try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing import tornado.options import celery from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=tornado.options.options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) ## Instruction: Add an util funcion for mocking options ## Code After: try: from urllib.parse import urlencode except ImportError: from urllib import urlencode import tornado.testing from tornado.options import options import celery import mock from flower.app import Flower from flower.urls import handlers from flower.events import Events from flower.urls import settings from flower import command # side effect - define options class AsyncHTTPTestCase(tornado.testing.AsyncHTTPTestCase): def get_app(self): capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app def get(self, url, **kwargs): return self.fetch(url, **kwargs) def post(self, url, **kwargs): if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) def mock_option(self, name, value): return mock.patch.object(options.mockable(), name, value)
... from urllib import urlencode import tornado.testing from tornado.options import options import celery import mock from flower.app import Flower from flower.urls import handlers ... capp = celery.Celery() events = Events(capp) app = Flower(capp=capp, events=events, options=options, handlers=handlers, **settings) app.delay = lambda method, *args, **kwargs: method(*args, **kwargs) return app ... if 'body' in kwargs and isinstance(kwargs['body'], dict): kwargs['body'] = urlencode(kwargs['body']) return self.fetch(url, method='POST', **kwargs) def mock_option(self, name, value): return mock.patch.object(options.mockable(), name, value) ...
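For context on the helper added above: `tornado.options.options.mockable()` returns a proxy whose attributes `mock.patch.object` can set and restore safely. A self-contained sketch with a made-up option name (`debug_mode`), using the standard-library mock:

```python
from unittest import mock

from tornado.options import define, options

define("debug_mode", default=False)

# Patch through the mockable proxy; the original value is restored on exit.
with mock.patch.object(options.mockable(), "debug_mode", True):
    assert options.debug_mode is True

assert options.debug_mode is False
```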
98393be0011f4e4227e6f5e86db68533af8b78e0
webserver/profiles/models.py
webserver/profiles/models.py
from django.db import models from django.db.models.signals import pre_save, post_save from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings import markdown import bleach class UserProfile(models.Model): user = models.OneToOneField(User) about_me = models.TextField() rendered_about_me = models.TextField(editable=False, null=True) @models.permalink def get_absolute_url(self): return ('view_profile', (), {'username': self.user.username}) @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) @receiver(pre_save, sender=UserProfile) def user_profile_pre_save(sender, instance, **kwargs): # Render the about_me field as HTML instead of markdown rendered = markdown.markdown(instance.about_me, safe_mode='escape') clean_rendered = bleach.clean(rendered, tags=settings.ALLOWED_HTML_TAGS, attributes=settings.ALLOWED_HTML_ATTRS) instance.rendered_about_me = clean_rendered
from django.db import models from django.db.models.signals import pre_save, post_save from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings from django.core.validators import MaxLengthValidator import markdown import bleach class UserProfile(models.Model): user = models.OneToOneField(User) about_me = models.TextField(validators=[MaxLengthValidator(500)]) rendered_about_me = models.TextField(editable=False, null=True) @models.permalink def get_absolute_url(self): return ('view_profile', (), {'username': self.user.username}) @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) @receiver(pre_save, sender=UserProfile) def user_profile_pre_save(sender, instance, **kwargs): # Render the about_me field as HTML instead of markdown rendered = markdown.markdown(instance.about_me, safe_mode='escape') clean_rendered = bleach.clean(rendered, tags=settings.ALLOWED_HTML_TAGS, attributes=settings.ALLOWED_HTML_ATTRS) instance.rendered_about_me = clean_rendered
Add maximum length validator to about_me
Add maximum length validator to about_me
Python
bsd-3-clause
siggame/webserver,siggame/webserver,siggame/webserver
python
## Code Before: from django.db import models from django.db.models.signals import pre_save, post_save from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings import markdown import bleach class UserProfile(models.Model): user = models.OneToOneField(User) about_me = models.TextField() rendered_about_me = models.TextField(editable=False, null=True) @models.permalink def get_absolute_url(self): return ('view_profile', (), {'username': self.user.username}) @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) @receiver(pre_save, sender=UserProfile) def user_profile_pre_save(sender, instance, **kwargs): # Render the about_me field as HTML instead of markdown rendered = markdown.markdown(instance.about_me, safe_mode='escape') clean_rendered = bleach.clean(rendered, tags=settings.ALLOWED_HTML_TAGS, attributes=settings.ALLOWED_HTML_ATTRS) instance.rendered_about_me = clean_rendered ## Instruction: Add maximum length validator to about_me ## Code After: from django.db import models from django.db.models.signals import pre_save, post_save from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings from django.core.validators import MaxLengthValidator import markdown import bleach class UserProfile(models.Model): user = models.OneToOneField(User) about_me = models.TextField(validators=[MaxLengthValidator(500)]) rendered_about_me = models.TextField(editable=False, null=True) @models.permalink def get_absolute_url(self): return ('view_profile', (), {'username': self.user.username}) @receiver(post_save, sender=User) def create_user_profile(sender, instance, created, **kwargs): if created: UserProfile.objects.create(user=instance) @receiver(pre_save, sender=UserProfile) def user_profile_pre_save(sender, instance, **kwargs): # Render the about_me field as HTML instead of markdown rendered = markdown.markdown(instance.about_me, safe_mode='escape') clean_rendered = bleach.clean(rendered, tags=settings.ALLOWED_HTML_TAGS, attributes=settings.ALLOWED_HTML_ATTRS) instance.rendered_about_me = clean_rendered
# ... existing code ... from django.dispatch import receiver from django.contrib.auth.models import User from django.conf import settings from django.core.validators import MaxLengthValidator import markdown import bleach # ... modified code ... class UserProfile(models.Model): user = models.OneToOneField(User) about_me = models.TextField(validators=[MaxLengthValidator(500)]) rendered_about_me = models.TextField(editable=False, null=True) # ... rest of the code ...
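An aside on the row above: the render-then-sanitize step it relies on is easy to demonstrate standalone. A minimal sketch with an illustrative whitelist (the real project takes its tag and attribute lists from Django settings):

```python
import bleach
import markdown

ALLOWED_TAGS = {"p", "em", "strong", "a", "ul", "ol", "li", "code", "pre"}
ALLOWED_ATTRS = {"a": ["href", "title"]}


def render_about_me(raw_text):
    """Render user-supplied markdown, then strip any disallowed HTML."""
    html = markdown.markdown(raw_text)
    return bleach.clean(html, tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRS)


print(render_about_me("**hello** <script>alert(1)</script>"))
```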
af51ef98d8575e7832d79c1068c092d388866dcb
donut/donut_SMTP_handler.py
donut/donut_SMTP_handler.py
from logging.handlers import SMTPHandler DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups WHERE group_name = "Devteam" ''' class DonutSMTPHandler(SMTPHandler): def __init__(self, mailhost, fromaddr, toaddrs, subject, db_instance, credentials=None, secure=None, timeout=5.0): super().__init__(mailhost, fromaddr, toaddrs, subject, credentials, secure, timeout) self.db_instance = db_instance def emit(self, record): ''' Overrides SMTPHandler's emit such that we dynamically get current donut dev team members ''' self.toaddrs = self.getAdmins() super().emit(record) def getAdmins(self): ''' Returns current members in Devteam ''' with self.db_instance.cursor() as cursor: cursor.execute(DEV_TEAM_EMAILS_QUERY, []) res = cursor.fetchall() return [result['email'] for result in res]
from logging.handlers import SMTPHandler DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups WHERE group_name = "Devteam" ''' DEFAULT_DEV_TEAM_EMAILS = ['[email protected]'] class DonutSMTPHandler(SMTPHandler): def __init__(self, mailhost, fromaddr, toaddrs, subject, db_instance, credentials=None, secure=None, timeout=5.0): super().__init__(mailhost, fromaddr, toaddrs, subject, credentials, secure, timeout) self.db_instance = db_instance def emit(self, record): ''' Overrides SMTPHandler's emit such that we dynamically get current donut dev team members ''' self.toaddrs = self.getAdmins() super().emit(record) def getAdmins(self): ''' Returns current members in Devteam ''' try: with self.db_instance.cursor() as cursor: cursor.execute(DEV_TEAM_EMAILS_QUERY) res = cursor.fetchall() return [result['email'] for result in res] except Exception: # If the database is inaccessible, fallback to a hard-coded email list return DEFAULT_DEV_TEAM_EMAILS
Allow error email to still be sent if DB is down
Allow error email to still be sent if DB is down We were seeing errors in the logs where the database was inaccessible, but the errors were not being emailed out because the handler makes a DB query.
Python
mit
ASCIT/donut,ASCIT/donut,ASCIT/donut
python
## Code Before: from logging.handlers import SMTPHandler DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups WHERE group_name = "Devteam" ''' class DonutSMTPHandler(SMTPHandler): def __init__(self, mailhost, fromaddr, toaddrs, subject, db_instance, credentials=None, secure=None, timeout=5.0): super().__init__(mailhost, fromaddr, toaddrs, subject, credentials, secure, timeout) self.db_instance = db_instance def emit(self, record): ''' Overrides SMTPHandler's emit such that we dynamically get current donut dev team members ''' self.toaddrs = self.getAdmins() super().emit(record) def getAdmins(self): ''' Returns current members in Devteam ''' with self.db_instance.cursor() as cursor: cursor.execute(DEV_TEAM_EMAILS_QUERY, []) res = cursor.fetchall() return [result['email'] for result in res] ## Instruction: Allow error email to still be sent if DB is down We were seeing errors in the logs where the database was inaccessible, but the errors were not being emailed out because the handler makes a DB query. ## Code After: from logging.handlers import SMTPHandler DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups WHERE group_name = "Devteam" ''' DEFAULT_DEV_TEAM_EMAILS = ['[email protected]'] class DonutSMTPHandler(SMTPHandler): def __init__(self, mailhost, fromaddr, toaddrs, subject, db_instance, credentials=None, secure=None, timeout=5.0): super().__init__(mailhost, fromaddr, toaddrs, subject, credentials, secure, timeout) self.db_instance = db_instance def emit(self, record): ''' Overrides SMTPHandler's emit such that we dynamically get current donut dev team members ''' self.toaddrs = self.getAdmins() super().emit(record) def getAdmins(self): ''' Returns current members in Devteam ''' try: with self.db_instance.cursor() as cursor: cursor.execute(DEV_TEAM_EMAILS_QUERY) res = cursor.fetchall() return [result['email'] for result in res] except Exception: # If the database is inaccessible, fallback to a hard-coded email list return DEFAULT_DEV_TEAM_EMAILS
# ... existing code ... from logging.handlers import SMTPHandler DEV_TEAM_EMAILS_QUERY = '''SELECT DISTINCT email FROM members NATURAL JOIN current_position_holders NATURAL JOIN positions NATURAL JOIN groups WHERE group_name = "Devteam" ''' DEFAULT_DEV_TEAM_EMAILS = ['[email protected]'] class DonutSMTPHandler(SMTPHandler): # ... modified code ... def getAdmins(self): ''' Returns current members in Devteam ''' try: with self.db_instance.cursor() as cursor: cursor.execute(DEV_TEAM_EMAILS_QUERY) res = cursor.fetchall() return [result['email'] for result in res] except Exception: # If the database is inaccessible, fallback to a hard-coded email list return DEFAULT_DEV_TEAM_EMAILS # ... rest of the code ...
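A hedged sketch of how such a handler is typically wired up (illustrative only; the mail host, sender address, subject and the `db` connection object are placeholders, not values from the project):

import logging

handler = DonutSMTPHandler(
    mailhost='localhost',
    fromaddr='noreply@example.org',     # placeholder sender
    toaddrs=DEFAULT_DEV_TEAM_EMAILS,    # replaced on every emit() by getAdmins()
    subject='Donut application error',
    db_instance=db)                     # `db` is whatever connection the app holds
handler.setLevel(logging.ERROR)
logging.getLogger().addHandler(handler)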
ca3978b6068add93418b4c5db8346143533beb7e
examples/forwarder_device.py
examples/forwarder_device.py
import os import zmq import yaml name = 'zmq_document_forwarder' filenames = [ os.path.join('/etc', name + '.yml'), os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'), ] config = {} for filename in filenames: if os.path.isfile(filename): print('found config file at', filename) with open(filename) as f: config.update(yaml.load(f)) def main(frontend_port, backend_port): try: context = zmq.Context(1) # Socket facing clients frontend = context.socket(zmq.SUB) frontend.bind("tcp://*:%d" % frontend_port) frontend.setsockopt_string(zmq.SUBSCRIBE, "") # Socket facing services backend = context.socket(zmq.PUB) backend.bind("tcp://*:%d" % backend_port) zmq.device(zmq.FORWARDER, frontend, backend) finally: frontend.close() backend.close() context.term() if __name__ == "__main__": main(int(config['frontend_port']), int(config['backend_port']))
import os import zmq import yaml name = 'zmq_document_forwarder' filenames = [ os.path.join('/etc', name + '.yml'), os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'), ] config = {} for filename in filenames: if os.path.isfile(filename): print('found config file at', filename) with open(filename) as f: config.update(yaml.load(f)) def main(frontend_port, backend_port): try: context = zmq.Context(1) # Socket facing clients frontend = context.socket(zmq.SUB) frontend.bind("tcp://*:%d" % frontend_port) frontend.setsockopt_string(zmq.SUBSCRIBE, "") # Socket facing services backend = context.socket(zmq.PUB) backend.bind("tcp://*:%d" % backend_port) print("Receiving on %d; publishing to %d" % (frontend_port, backend_port)) zmq.device(zmq.FORWARDER, frontend, backend) finally: frontend.close() backend.close() context.term() if __name__ == "__main__": main(int(config['frontend_port']), int(config['backend_port']))
Print ports when forwarder device starts.
MNT: Print ports when forwarder device starts.
Python
bsd-3-clause
ericdill/bluesky,ericdill/bluesky
python
## Code Before: import os import zmq import yaml name = 'zmq_document_forwarder' filenames = [ os.path.join('/etc', name + '.yml'), os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'), ] config = {} for filename in filenames: if os.path.isfile(filename): print('found config file at', filename) with open(filename) as f: config.update(yaml.load(f)) def main(frontend_port, backend_port): try: context = zmq.Context(1) # Socket facing clients frontend = context.socket(zmq.SUB) frontend.bind("tcp://*:%d" % frontend_port) frontend.setsockopt_string(zmq.SUBSCRIBE, "") # Socket facing services backend = context.socket(zmq.PUB) backend.bind("tcp://*:%d" % backend_port) zmq.device(zmq.FORWARDER, frontend, backend) finally: frontend.close() backend.close() context.term() if __name__ == "__main__": main(int(config['frontend_port']), int(config['backend_port'])) ## Instruction: MNT: Print ports when forwarder device starts. ## Code After: import os import zmq import yaml name = 'zmq_document_forwarder' filenames = [ os.path.join('/etc', name + '.yml'), os.path.join(os.path.expanduser('~'), '.config', name, 'connection.yml'), ] config = {} for filename in filenames: if os.path.isfile(filename): print('found config file at', filename) with open(filename) as f: config.update(yaml.load(f)) def main(frontend_port, backend_port): try: context = zmq.Context(1) # Socket facing clients frontend = context.socket(zmq.SUB) frontend.bind("tcp://*:%d" % frontend_port) frontend.setsockopt_string(zmq.SUBSCRIBE, "") # Socket facing services backend = context.socket(zmq.PUB) backend.bind("tcp://*:%d" % backend_port) print("Receiving on %d; publishing to %d" % (frontend_port, backend_port)) zmq.device(zmq.FORWARDER, frontend, backend) finally: frontend.close() backend.close() context.term() if __name__ == "__main__": main(int(config['frontend_port']), int(config['backend_port']))
# ... existing code ... def main(frontend_port, backend_port): try: context = zmq.Context(1) # Socket facing clients # ... modified code ... # Socket facing services backend = context.socket(zmq.PUB) backend.bind("tcp://*:%d" % backend_port) print("Receiving on %d; publishing to %d" % (frontend_port, backend_port)) zmq.device(zmq.FORWARDER, frontend, backend) finally: frontend.close() # ... rest of the code ...
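For context, a hedged sketch of the YAML the script looks for and of a downstream subscriber connecting to the forwarder's backend side (the port numbers are invented for illustration):

# ~/.config/zmq_document_forwarder/connection.yml (example values)
#   frontend_port: 5577
#   backend_port: 5578

import zmq

ctx = zmq.Context()
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://localhost:5578")       # matches backend_port above
sub.setsockopt_string(zmq.SUBSCRIBE, "")  # subscribe to everything
message = sub.recv()                      # blocks until a publisher sends data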
bb11252c277d40c8ec8c579100c04a6a676accfe
tests/run.py
tests/run.py
from os import path import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings from django.test.runner import DiscoverRunner settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)), ) class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1)
from os import path import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)), ) from django.test.runner import DiscoverRunner class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1)
Reorder imports to dodge a settings problem.
Reorder imports to dodge a settings problem.
Python
bsd-2-clause
incuna/incuna-auth,ghickman/incuna-auth,incuna/incuna-auth,ghickman/incuna-auth
python
## Code Before: from os import path import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings from django.test.runner import DiscoverRunner settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)), ) class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1) ## Instruction: Reorder imports to dodge a settings problem. ## Code After: from os import path import sys from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings settings.configure( INSTALLED_APPS=( # Put contenttypes before auth to work around test issue. # See: https://code.djangoproject.com/ticket/10827#comment:12 'django.contrib.contenttypes', 'django.contrib.auth', 'django.contrib.sessions', 'django.contrib.admin', 'django-admin-sso', 'django-crispy-forms', 'incuna_auth', ), PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',), AUTH_USER_MODEL='tests.User', ROOT_URLCONF='incuna_auth.urls', REST_FRAMEWORK={ 'DEFAULT_PERMISSION_CLASSES': ('rest_framework.permissions.IsAuthenticated',), 'DEFAULT_RENDERER_CLASSES': ('rest_framework.renderers.JSONRenderer',), }, TEST_DISCOVER_TOP_LEVEL=path.dirname(path.dirname(__file__)), ) from django.test.runner import DiscoverRunner class Runner(ColourRunnerMixin, DiscoverRunner): pass test_runner = Runner(verbosity=1) failures = test_runner.run_tests(['tests']) if failures: sys.exit(1)
// ... existing code ... from colour_runner.django_runner import ColourRunnerMixin from django.conf import settings settings.configure( // ... modified code ... ) from django.test.runner import DiscoverRunner class Runner(ColourRunnerMixin, DiscoverRunner): pass // ... rest of the code ...
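The underlying pattern, as a minimal sketch (assuming, as the commit message implies, a Django version where importing the test runner touches settings at import time): settings.configure() has to run before any import that reads settings.

from django.conf import settings

settings.configure(DEBUG=True, DATABASES={},
                   INSTALLED_APPS=['django.contrib.contenttypes'])

# Only now is it safe to import modules that may read settings on import;
# doing this first can raise ImproperlyConfigured on some Django versions.
from django.test.runner import DiscoverRunner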
e751cb4f4805aed079fc025b9b1655f30cf5e69a
watson/html/entities.py
watson/html/entities.py
import re from html import _escape_map_full from html.entities import codepoint2name html_entities = {_ord: '&{0};'.format(value) for _ord, value in codepoint2name.items()} html_entities.update(_escape_map_full) entities_html = {value: _ord for _ord, value in html_entities.items()} def encode(string): """Encodes html entities. This is a little more full featured than html.escape, as it will replace all charactes from codepoint2name. Returns: string with replaced html entities. """ return string.translate(html_entities) def decode(string): """Decodes html entities. Returns: string with html entities decoded. """ return ( re.sub( '&(?:[#a-z][a-z0-9]+);', lambda m: chr(entities_html[m.group()]), string) )
import re from html.entities import codepoint2name try: from html import _escape_map_full except: # taken from the 3.3 standard lib, as it's removed in 3.4 _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;', ord('"'): '&quot;', ord('\''): '&#x27;'} html_entities = {_ord: '&{0};'.format(value) for _ord, value in codepoint2name.items()} html_entities.update(_escape_map_full) entities_html = {value: _ord for _ord, value in html_entities.items()} def encode(string): """Encodes html entities. This is a little more full featured than html.escape, as it will replace all charactes from codepoint2name. Returns: string with replaced html entities. """ return string.translate(html_entities) def decode(string): """Decodes html entities. Returns: string with html entities decoded. """ return ( re.sub( '&(?:[#a-z][a-z0-9]+);', lambda m: chr(entities_html[m.group()]), string) )
Fix for Python 3.4 html module not containing _escape_map_full
Fix for Python 3.4 html module not containing _escape_map_full
Python
bsd-3-clause
watsonpy/watson-html
python
## Code Before: import re from html import _escape_map_full from html.entities import codepoint2name html_entities = {_ord: '&{0};'.format(value) for _ord, value in codepoint2name.items()} html_entities.update(_escape_map_full) entities_html = {value: _ord for _ord, value in html_entities.items()} def encode(string): """Encodes html entities. This is a little more full featured than html.escape, as it will replace all charactes from codepoint2name. Returns: string with replaced html entities. """ return string.translate(html_entities) def decode(string): """Decodes html entities. Returns: string with html entities decoded. """ return ( re.sub( '&(?:[#a-z][a-z0-9]+);', lambda m: chr(entities_html[m.group()]), string) ) ## Instruction: Fix for Python 3.4 html module not containing _escape_map_full ## Code After: import re from html.entities import codepoint2name try: from html import _escape_map_full except: # taken from the 3.3 standard lib, as it's removed in 3.4 _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;', ord('"'): '&quot;', ord('\''): '&#x27;'} html_entities = {_ord: '&{0};'.format(value) for _ord, value in codepoint2name.items()} html_entities.update(_escape_map_full) entities_html = {value: _ord for _ord, value in html_entities.items()} def encode(string): """Encodes html entities. This is a little more full featured than html.escape, as it will replace all charactes from codepoint2name. Returns: string with replaced html entities. """ return string.translate(html_entities) def decode(string): """Decodes html entities. Returns: string with html entities decoded. """ return ( re.sub( '&(?:[#a-z][a-z0-9]+);', lambda m: chr(entities_html[m.group()]), string) )
// ... existing code ... import re from html.entities import codepoint2name try: from html import _escape_map_full except: # taken from the 3.3 standard lib, as it's removed in 3.4 _escape_map_full = {ord('&'): '&amp;', ord('<'): '&lt;', ord('>'): '&gt;', ord('"'): '&quot;', ord('\''): '&#x27;'} html_entities = {_ord: '&{0};'.format(value) for _ord, value in codepoint2name.items()} // ... rest of the code ...
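An illustrative round trip through the module above (expected output shown in comments):

from watson.html import entities

encoded = entities.encode('Tom & "Jerry" <cartoon>')
# -> 'Tom &amp; &quot;Jerry&quot; &lt;cartoon&gt;'
decoded = entities.decode(encoded)
# -> 'Tom & "Jerry" <cartoon>'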
7cef87a81278c227db0cb07329d1b659dbd175b3
mail_factory/models.py
mail_factory/models.py
import django from django.conf import settings from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
import django from django.conf import settings from django.utils.module_loading import module_has_submodule try: from importlib import import_module except ImportError: # Compatibility for python-2.6 from django.utils.importlib import import_module def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
Use standard library instead of django.utils.importlib
Use standard library instead of django.utils.importlib > django.utils.importlib is a compatibility library for when Python 2.6 was > still supported. It has been obsolete since Django 1.7, which dropped support > for Python 2.6, and is removed in 1.9 per the deprecation cycle. > Use Python's import_module function instead > — [1] References: [1] http://stackoverflow.com/a/32763639 [2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9
Python
bsd-3-clause
novafloss/django-mail-factory,novafloss/django-mail-factory
python
## Code Before: import django from django.conf import settings from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover() ## Instruction: Use standard library instead of django.utils.importlib > django.utils.importlib is a compatibility library for when Python 2.6 was > still supported. It has been obsolete since Django 1.7, which dropped support > for Python 2.6, and is removed in 1.9 per the deprecation cycle. > Use Python's import_module function instead > — [1] References: [1] http://stackoverflow.com/a/32763639 [2] https://docs.djangoproject.com/en/1.9/internals/deprecation/#deprecation-removed-in-1-9 ## Code After: import django from django.conf import settings from django.utils.module_loading import module_has_submodule try: from importlib import import_module except ImportError: # Compatibility for python-2.6 from django.utils.importlib import import_module def autodiscover(): """Auto-discover INSTALLED_APPS mails.py modules.""" for app in settings.INSTALLED_APPS: module = '%s.mails' % app # Attempt to import the app's 'mails' module try: import_module(module) except: # Decide whether to bubble up this error. If the app just # doesn't have a mails module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. app_module = import_module(app) if module_has_submodule(app_module, 'mails'): raise # If we're using Django >= 1.7, use the new app-loading mecanism which is way # better. if django.VERSION < (1, 7): autodiscover()
# ... existing code ... import django from django.conf import settings from django.utils.module_loading import module_has_submodule try: from importlib import import_module except ImportError: # Compatibility for python-2.6 from django.utils.importlib import import_module def autodiscover(): # ... rest of the code ...
10ae930f6f14c2840d0b87cbec17054b4cc318d2
facebook_auth/models.py
facebook_auth/models.py
from django.contrib.auth import models as auth_models from django.db import models import facepy import simplejson from facebook_auth import utils class FacebookUser(auth_models.User): user_id = models.BigIntegerField(unique=True) access_token = models.TextField(blank=True, null=True) app_friends = models.ManyToManyField('self') @property def graph(self): return facepy.GraphAPI(self.access_token) @property def js_session(self): return simplejson.dumps({ 'access_token': self.access_token, 'uid': self.user_id }) @property def friends(self): return utils.get_from_graph_api(self.graph, "me/friends")['data'] def update_app_friends(self): friends = self.friends friends_ids = [f['id'] for f in friends] self.app_friends.clear() self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids))
from uuid import uuid1 from django.conf import settings from django.contrib.auth import models as auth_models from django.db import models import facepy import simplejson from facebook_auth import utils class FacebookUser(auth_models.User): user_id = models.BigIntegerField(unique=True) access_token = models.TextField(blank=True, null=True) app_friends = models.ManyToManyField('self') @property def graph(self): return facepy.GraphAPI(self.access_token) @property def js_session(self): return simplejson.dumps({ 'access_token': self.access_token, 'uid': self.user_id }) @property def friends(self): return utils.get_from_graph_api(self.graph, "me/friends")['data'] def update_app_friends(self): friends = self.friends friends_ids = [f['id'] for f in friends] self.app_friends.clear() self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids)) def get_auth_address(request, redirect_to, scope=''): state = unicode(uuid1()) request.session['state'] = state return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % ( settings.FACEBOOK_APP_ID, redirect_to, scope, state )
Add support for server side authentication.
Add support for server side authentication. Change-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e Reviewed-on: http://review.pozytywnie.pl:8080/5087 Reviewed-by: Tomasz Wysocki <[email protected]> Tested-by: Tomasz Wysocki <[email protected]>
Python
mit
pozytywnie/django-facebook-auth,pozytywnie/django-facebook-auth,jgoclawski/django-facebook-auth,jgoclawski/django-facebook-auth
python
## Code Before: from django.contrib.auth import models as auth_models from django.db import models import facepy import simplejson from facebook_auth import utils class FacebookUser(auth_models.User): user_id = models.BigIntegerField(unique=True) access_token = models.TextField(blank=True, null=True) app_friends = models.ManyToManyField('self') @property def graph(self): return facepy.GraphAPI(self.access_token) @property def js_session(self): return simplejson.dumps({ 'access_token': self.access_token, 'uid': self.user_id }) @property def friends(self): return utils.get_from_graph_api(self.graph, "me/friends")['data'] def update_app_friends(self): friends = self.friends friends_ids = [f['id'] for f in friends] self.app_friends.clear() self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids)) ## Instruction: Add support for server side authentication. Change-Id: Iff45fa00b5a5b389f998570827e33d9d232f5d1e Reviewed-on: http://review.pozytywnie.pl:8080/5087 Reviewed-by: Tomasz Wysocki <[email protected]> Tested-by: Tomasz Wysocki <[email protected]> ## Code After: from uuid import uuid1 from django.conf import settings from django.contrib.auth import models as auth_models from django.db import models import facepy import simplejson from facebook_auth import utils class FacebookUser(auth_models.User): user_id = models.BigIntegerField(unique=True) access_token = models.TextField(blank=True, null=True) app_friends = models.ManyToManyField('self') @property def graph(self): return facepy.GraphAPI(self.access_token) @property def js_session(self): return simplejson.dumps({ 'access_token': self.access_token, 'uid': self.user_id }) @property def friends(self): return utils.get_from_graph_api(self.graph, "me/friends")['data'] def update_app_friends(self): friends = self.friends friends_ids = [f['id'] for f in friends] self.app_friends.clear() self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids)) def get_auth_address(request, redirect_to, scope=''): state = unicode(uuid1()) request.session['state'] = state return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % ( settings.FACEBOOK_APP_ID, redirect_to, scope, state )
... from uuid import uuid1 from django.conf import settings from django.contrib.auth import models as auth_models from django.db import models import facepy ... friends_ids = [f['id'] for f in friends] self.app_friends.clear() self.app_friends.add(*FacebookUser.objects.filter(user_id__in=friends_ids)) def get_auth_address(request, redirect_to, scope=''): state = unicode(uuid1()) request.session['state'] = state return 'https://www.facebook.com/dialog/oauth?client_id=%s&redirect_uri=%s&scope=%s&state=%s' % ( settings.FACEBOOK_APP_ID, redirect_to, scope, state ) ...
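A hypothetical view using the new helper (the callback path and scope are placeholders; a real integration would also URL-encode redirect_uri before building the dialog URL):

from django.http import HttpResponseRedirect

def facebook_login(request):
    redirect_uri = request.build_absolute_uri('/facebook/callback/')  # placeholder path
    return HttpResponseRedirect(
        get_auth_address(request, redirect_uri, scope='email'))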
ccf3bcfc962a37d088507b542bd8e3af2ce515b6
tests/test_with_testcase.py
tests/test_with_testcase.py
import time import unittest import pytest class TerribleTerribleWayToWriteTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark): self.benchmark = benchmark def test_foo(self): self.benchmark(time.sleep, 0.000001) class TerribleTerribleWayToWritePatchTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark_weave): self.benchmark_weave = benchmark_weave def test_foo2(self): with self.benchmark_weave('time.sleep'): time.sleep(0.0000001)
import time import unittest import pytest class TerribleTerribleWayToWriteTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark): self.benchmark = benchmark def test_foo(self): self.benchmark(time.sleep, 0.000001) class TerribleTerribleWayToWritePatchTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark_weave): self.benchmark_weave = benchmark_weave def test_foo2(self): self.benchmark_weave('time.sleep') time.sleep(0.0000001)
Remove use of context manager.
Remove use of context manager.
Python
bsd-2-clause
thedrow/pytest-benchmark,aldanor/pytest-benchmark,SectorLabs/pytest-benchmark,ionelmc/pytest-benchmark
python
## Code Before: import time import unittest import pytest class TerribleTerribleWayToWriteTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark): self.benchmark = benchmark def test_foo(self): self.benchmark(time.sleep, 0.000001) class TerribleTerribleWayToWritePatchTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark_weave): self.benchmark_weave = benchmark_weave def test_foo2(self): with self.benchmark_weave('time.sleep'): time.sleep(0.0000001) ## Instruction: Remove use of context manager. ## Code After: import time import unittest import pytest class TerribleTerribleWayToWriteTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark): self.benchmark = benchmark def test_foo(self): self.benchmark(time.sleep, 0.000001) class TerribleTerribleWayToWritePatchTests(unittest.TestCase): @pytest.fixture(autouse=True) def setupBenchmark(self, benchmark_weave): self.benchmark_weave = benchmark_weave def test_foo2(self): self.benchmark_weave('time.sleep') time.sleep(0.0000001)
# ... existing code ... self.benchmark_weave = benchmark_weave def test_foo2(self): self.benchmark_weave('time.sleep') time.sleep(0.0000001) # ... rest of the code ...
d042b90239d2c995b69ea5352d72373faa5a72cc
java/src/main/java/com/google/appengine/tools/cloudstorage/oauth/AppIdentityAccessTokenProvider.java
java/src/main/java/com/google/appengine/tools/cloudstorage/oauth/AppIdentityAccessTokenProvider.java
package com.google.appengine.tools.cloudstorage.oauth; import com.google.appengine.api.appidentity.AppIdentityService; import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult; import com.google.appengine.api.appidentity.AppIdentityServiceFactory; import com.google.appengine.api.utils.SystemProperty; import java.util.List; /** * Provider that uses the AppIdentityService for generating access tokens. */ final class AppIdentityAccessTokenProvider implements AccessTokenProvider { private final AppIdentityService appIdentityService; public AppIdentityAccessTokenProvider() { if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) { throw new IllegalStateException( "The access token from AppIdentity won't work in the development environment."); } this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService(); } @Override public GetAccessTokenResult getNewAccessToken(List<String> scopes) { return appIdentityService.getAccessToken(scopes); } }
package com.google.appengine.tools.cloudstorage.oauth; import com.google.appengine.api.appidentity.AppIdentityService; import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult; import com.google.appengine.api.appidentity.AppIdentityServiceFactory; import com.google.appengine.api.utils.SystemProperty; import java.util.List; /** * Provider that uses the AppIdentityService for generating access tokens. */ final class AppIdentityAccessTokenProvider implements AccessTokenProvider { private final AppIdentityService appIdentityService; public AppIdentityAccessTokenProvider() { this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService(); } @Override public GetAccessTokenResult getNewAccessToken(List<String> scopes) { if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) { throw new IllegalStateException( "The access token from AppIdentity won't work in the development environment."); } return appIdentityService.getAccessToken(scopes); } }
Move check for development environment into the getNewAccessToken method.
Move check for development environment into the getNewAccessToken method. Revision created by MOE tool push_codebase. MOE_MIGRATION=7102
Java
apache-2.0
GoogleCloudPlatform/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client,aozarov/appengine-gcs-client,GoogleCloudPlatform/appengine-gcs-client,aozarov/appengine-gcs-client
java
## Code Before: package com.google.appengine.tools.cloudstorage.oauth; import com.google.appengine.api.appidentity.AppIdentityService; import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult; import com.google.appengine.api.appidentity.AppIdentityServiceFactory; import com.google.appengine.api.utils.SystemProperty; import java.util.List; /** * Provider that uses the AppIdentityService for generating access tokens. */ final class AppIdentityAccessTokenProvider implements AccessTokenProvider { private final AppIdentityService appIdentityService; public AppIdentityAccessTokenProvider() { if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) { throw new IllegalStateException( "The access token from AppIdentity won't work in the development environment."); } this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService(); } @Override public GetAccessTokenResult getNewAccessToken(List<String> scopes) { return appIdentityService.getAccessToken(scopes); } } ## Instruction: Move check for development environment into the getNewAccessToken method. Revision created by MOE tool push_codebase. MOE_MIGRATION=7102 ## Code After: package com.google.appengine.tools.cloudstorage.oauth; import com.google.appengine.api.appidentity.AppIdentityService; import com.google.appengine.api.appidentity.AppIdentityService.GetAccessTokenResult; import com.google.appengine.api.appidentity.AppIdentityServiceFactory; import com.google.appengine.api.utils.SystemProperty; import java.util.List; /** * Provider that uses the AppIdentityService for generating access tokens. */ final class AppIdentityAccessTokenProvider implements AccessTokenProvider { private final AppIdentityService appIdentityService; public AppIdentityAccessTokenProvider() { this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService(); } @Override public GetAccessTokenResult getNewAccessToken(List<String> scopes) { if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) { throw new IllegalStateException( "The access token from AppIdentity won't work in the development environment."); } return appIdentityService.getAccessToken(scopes); } }
... private final AppIdentityService appIdentityService; public AppIdentityAccessTokenProvider() { this.appIdentityService = AppIdentityServiceFactory.getAppIdentityService(); } @Override public GetAccessTokenResult getNewAccessToken(List<String> scopes) { if (SystemProperty.environment.value() == SystemProperty.Environment.Value.Development) { throw new IllegalStateException( "The access token from AppIdentity won't work in the development environment."); } return appIdentityService.getAccessToken(scopes); } } ...
824c8cd3eb563de60ddf13fac1f7ca1341aa01f1
astral/api/tests/test_streams.py
astral/api/tests/test_streams.py
from tornado.httpclient import HTTPRequest from nose.tools import eq_, ok_ import json import faker from astral.api.tests import BaseTest from astral.models import Stream from astral.models.tests.factories import StreamFactory class StreamsHandlerTest(BaseTest): def test_get_streams(self): [StreamFactory() for _ in range(3)] response = self.fetch('/streams') eq_(response.code, 200) result = json.loads(response.body) ok_('streams' in result) for stream in result['streams']: ok_(Stream.get_by(name=stream['name'])) def test_create_stream(self): data = {'name': faker.lorem.sentence()} eq_(Stream.get_by(name=data['name']), None) self.http_client.fetch(HTTPRequest( self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop) response = self.wait() eq_(response.code, 200) ok_(Stream.get_by(name=data['name']))
from tornado.httpclient import HTTPRequest from nose.tools import eq_, ok_ import json import faker from astral.api.tests import BaseTest from astral.models import Stream from astral.models.tests.factories import StreamFactory class StreamsHandlerTest(BaseTest): def test_get_streams(self): [StreamFactory() for _ in range(3)] response = self.fetch('/streams') eq_(response.code, 200) result = json.loads(response.body) ok_('streams' in result) for stream in result['streams']: ok_(Stream.get_by(name=stream['name'])) def test_create_stream(self): data = {'name': faker.lorem.sentence()} eq_(Stream.get_by(name=data['name']), None) self.http_client.fetch(HTTPRequest( self.get_url('/streams'), 'POST', body=json.dumps(data), follow_redirects=False), self.stop) response = self.wait() eq_(response.code, 302) ok_(Stream.get_by(name=data['name']))
Update tests for new redirect-after-create stream.
Update tests for new redirect-after-create stream.
Python
mit
peplin/astral
python
## Code Before: from tornado.httpclient import HTTPRequest from nose.tools import eq_, ok_ import json import faker from astral.api.tests import BaseTest from astral.models import Stream from astral.models.tests.factories import StreamFactory class StreamsHandlerTest(BaseTest): def test_get_streams(self): [StreamFactory() for _ in range(3)] response = self.fetch('/streams') eq_(response.code, 200) result = json.loads(response.body) ok_('streams' in result) for stream in result['streams']: ok_(Stream.get_by(name=stream['name'])) def test_create_stream(self): data = {'name': faker.lorem.sentence()} eq_(Stream.get_by(name=data['name']), None) self.http_client.fetch(HTTPRequest( self.get_url('/streams'), 'POST', body=json.dumps(data)), self.stop) response = self.wait() eq_(response.code, 200) ok_(Stream.get_by(name=data['name'])) ## Instruction: Update tests for new redirect-after-create stream. ## Code After: from tornado.httpclient import HTTPRequest from nose.tools import eq_, ok_ import json import faker from astral.api.tests import BaseTest from astral.models import Stream from astral.models.tests.factories import StreamFactory class StreamsHandlerTest(BaseTest): def test_get_streams(self): [StreamFactory() for _ in range(3)] response = self.fetch('/streams') eq_(response.code, 200) result = json.loads(response.body) ok_('streams' in result) for stream in result['streams']: ok_(Stream.get_by(name=stream['name'])) def test_create_stream(self): data = {'name': faker.lorem.sentence()} eq_(Stream.get_by(name=data['name']), None) self.http_client.fetch(HTTPRequest( self.get_url('/streams'), 'POST', body=json.dumps(data), follow_redirects=False), self.stop) response = self.wait() eq_(response.code, 302) ok_(Stream.get_by(name=data['name']))
// ... existing code ... data = {'name': faker.lorem.sentence()} eq_(Stream.get_by(name=data['name']), None) self.http_client.fetch(HTTPRequest( self.get_url('/streams'), 'POST', body=json.dumps(data), follow_redirects=False), self.stop) response = self.wait() eq_(response.code, 302) ok_(Stream.get_by(name=data['name'])) // ... rest of the code ...
70d009834123cb5a10788763fed3193017cc8162
libpebble2/__init__.py
libpebble2/__init__.py
__author__ = 'katharine' from .exceptions import *
__author__ = 'katharine' import logging from .exceptions import * logging.getLogger('libpebble2').addHandler(logging.NullHandler())
Add a default null logger per python recommendations.
Add a default null logger per python recommendations.
Python
mit
pebble/libpebble2
python
## Code Before: __author__ = 'katharine' from .exceptions import * ## Instruction: Add a default null logger per python recommendations. ## Code After: __author__ = 'katharine' import logging from .exceptions import * logging.getLogger('libpebble2').addHandler(logging.NullHandler())
# ... existing code ... __author__ = 'katharine' import logging from .exceptions import * logging.getLogger('libpebble2').addHandler(logging.NullHandler()) # ... rest of the code ...
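On the consuming side this means the library stays silent until an application opts in; a minimal sketch of that opt-in:

import logging

logging.basicConfig(level=logging.DEBUG)                 # application-level handler
logging.getLogger('libpebble2').setLevel(logging.DEBUG)  # surface the library's output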
29562b08e436abc8465404e49d9193537721b717
src/odin/contrib/money/fields.py
src/odin/contrib/money/fields.py
from __future__ import absolute_import, print_function from odin import exceptions from odin.fields import ScalarField from odin.validators import EMPTY_VALUES from .datatypes import Amount __all__ = ('AmountField', ) class AmountField(ScalarField): """ Field that contains a monetary amount (with an optional currency). """ default_error_messages = { 'invalid': "'%s' value must be a (amount, currency).", 'invalid_currency': "'%s' currency is not supported.", } data_type_name = "Amount" def __init__(self, allowed_currencies=None, **kwargs): super(AmountField, self).__init__(**kwargs) self.allowed_currencies = allowed_currencies def to_python(self, value): if value in EMPTY_VALUES: return if isinstance(value, Amount): return value try: return Amount(value) except (ValueError, TypeError): msg = self.error_messages['invalid'] % value raise exceptions.ValidationError(msg) def validate(self, value): super(AmountField, self).validate(value) if self.allowed_currencies and value not in EMPTY_VALUES: if value.currency not in self.allowed_currencies: msg = self.error_messages['invalid_currency'] % str(value.currency) raise exceptions.ValidationError(msg) def prepare(self, value): if value in EMPTY_VALUES: return return float(value), value.currency.code
from __future__ import absolute_import, print_function from odin import exceptions from odin.fields import ScalarField from odin.validators import EMPTY_VALUES from .datatypes import Amount __all__ = ("AmountField",) class AmountField(ScalarField): """ Field that contains a monetary amount (with an optional currency). """ default_error_messages = { "invalid": "'%s' value must be a (amount, currency).", "invalid_currency": "'%s' currency is not supported.", } data_type_name = "Amount" def __init__(self, allowed_currencies=None, **kwargs): super(AmountField, self).__init__(**kwargs) self.allowed_currencies = allowed_currencies def to_python(self, value): if value in EMPTY_VALUES: return if isinstance(value, Amount): return value try: return Amount(value) except (ValueError, TypeError): msg = self.error_messages["invalid"] % value raise exceptions.ValidationError(msg) def validate(self, value): super(AmountField, self).validate(value) if ( self.allowed_currencies and (value not in EMPTY_VALUES) and (value.currency not in self.allowed_currencies) ): msg = self.error_messages["invalid_currency"] % str(value.currency) raise exceptions.ValidationError(msg) def prepare(self, value): if value in EMPTY_VALUES: return return float(value), value.currency.code
Correct issue from Sonar (and black file)
Correct issue from Sonar (and black file)
Python
bsd-3-clause
python-odin/odin
python
## Code Before: from __future__ import absolute_import, print_function from odin import exceptions from odin.fields import ScalarField from odin.validators import EMPTY_VALUES from .datatypes import Amount __all__ = ('AmountField', ) class AmountField(ScalarField): """ Field that contains a monetary amount (with an optional currency). """ default_error_messages = { 'invalid': "'%s' value must be a (amount, currency).", 'invalid_currency': "'%s' currency is not supported.", } data_type_name = "Amount" def __init__(self, allowed_currencies=None, **kwargs): super(AmountField, self).__init__(**kwargs) self.allowed_currencies = allowed_currencies def to_python(self, value): if value in EMPTY_VALUES: return if isinstance(value, Amount): return value try: return Amount(value) except (ValueError, TypeError): msg = self.error_messages['invalid'] % value raise exceptions.ValidationError(msg) def validate(self, value): super(AmountField, self).validate(value) if self.allowed_currencies and value not in EMPTY_VALUES: if value.currency not in self.allowed_currencies: msg = self.error_messages['invalid_currency'] % str(value.currency) raise exceptions.ValidationError(msg) def prepare(self, value): if value in EMPTY_VALUES: return return float(value), value.currency.code ## Instruction: Correct issue from Sonar (and black file) ## Code After: from __future__ import absolute_import, print_function from odin import exceptions from odin.fields import ScalarField from odin.validators import EMPTY_VALUES from .datatypes import Amount __all__ = ("AmountField",) class AmountField(ScalarField): """ Field that contains a monetary amount (with an optional currency). """ default_error_messages = { "invalid": "'%s' value must be a (amount, currency).", "invalid_currency": "'%s' currency is not supported.", } data_type_name = "Amount" def __init__(self, allowed_currencies=None, **kwargs): super(AmountField, self).__init__(**kwargs) self.allowed_currencies = allowed_currencies def to_python(self, value): if value in EMPTY_VALUES: return if isinstance(value, Amount): return value try: return Amount(value) except (ValueError, TypeError): msg = self.error_messages["invalid"] % value raise exceptions.ValidationError(msg) def validate(self, value): super(AmountField, self).validate(value) if ( self.allowed_currencies and (value not in EMPTY_VALUES) and (value.currency not in self.allowed_currencies) ): msg = self.error_messages["invalid_currency"] % str(value.currency) raise exceptions.ValidationError(msg) def prepare(self, value): if value in EMPTY_VALUES: return return float(value), value.currency.code
... from odin.validators import EMPTY_VALUES from .datatypes import Amount __all__ = ("AmountField",) class AmountField(ScalarField): ... """ Field that contains a monetary amount (with an optional currency). """ default_error_messages = { "invalid": "'%s' value must be a (amount, currency).", "invalid_currency": "'%s' currency is not supported.", } data_type_name = "Amount" ... try: return Amount(value) except (ValueError, TypeError): msg = self.error_messages["invalid"] % value raise exceptions.ValidationError(msg) def validate(self, value): super(AmountField, self).validate(value) if ( self.allowed_currencies and (value not in EMPTY_VALUES) and (value.currency not in self.allowed_currencies) ): msg = self.error_messages["invalid_currency"] % str(value.currency) raise exceptions.ValidationError(msg) def prepare(self, value): if value in EMPTY_VALUES: ...
bf24b8dab13c3779514a00d61c3ea440704b1cbf
setup.py
setup.py
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } )
try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
Add optional dependency on arpy
Add optional dependency on arpy
Python
bsd-2-clause
angr/cle
python
## Code Before: try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], } ) ## Instruction: Add optional dependency on arpy ## Code After: try: from setuptools import setup from setuptools import find_packages packages = find_packages() except ImportError: from distutils.core import setup import os packages = [x.strip('./').replace('/','.') for x in os.popen('find -name "__init__.py" | xargs -n1 dirname').read().strip().split('\n')] if bytes is str: raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.") setup( name='cle', description='CLE Loads Everything (at least, many binary formats!) and provides a pythonic interface to analyze what they are and what they would look like in memory.', version='8.20.1.7', python_requires='>=3.5', packages=packages, install_requires=[ 'pyelftools>=0.25', 'cffi', 'pyvex==8.20.1.7', 'pefile', 'sortedcontainers>=2.0', ], extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } )
// ... existing code ... extras_require={ "minidump": ["minidump==0.0.10"], "xbe": ["pyxbe==0.0.2"], "ar": ["arpy==1.1.1"], } ) // ... rest of the code ...
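The extra only matters at install time; a plain install leaves arpy out (sketch of the pip invocations only):

# pip install "cle[ar]"   -> installs cle plus arpy==1.1.1
# pip install cle         -> arpy is not installed; ar-archive support stays optional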
65bfede8d8739699e57ddd4f66049ac0374d1a8d
ydf/instructions.py
ydf/instructions.py
__all__ = [] FROM = 'FROM' RUN = 'RUN' CMD = 'CMD' LABEL = 'LABEL' EXPOSE = 'EXPOSE' ENV = 'ENV' ADD = 'ADD' COPY = 'COPY' ENTRYPOINT = 'ENTRYPOINT' VOLUME = 'VOLUME' USER = 'USER' WORKDIR = 'WORKDIR' ARG = 'ARG' ONBUILD = 'ONBUILD' STOPSIGNAL = 'STOPSIGNAL' HEALTHCHECK = 'HEALTHCHECK' SHELL = 'SHELL'
import collections import functools from ydf import meta __all__ = [] FROM = 'FROM' RUN = 'RUN' CMD = 'CMD' LABEL = 'LABEL' EXPOSE = 'EXPOSE' ENV = 'ENV' ADD = 'ADD' COPY = 'COPY' ENTRYPOINT = 'ENTRYPOINT' VOLUME = 'VOLUME' USER = 'USER' WORKDIR = 'WORKDIR' ARG = 'ARG' ONBUILD = 'ONBUILD' STOPSIGNAL = 'STOPSIGNAL' HEALTHCHECK = 'HEALTHCHECK' SHELL = 'SHELL' def get_instructions(): """ Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`. """ instructions = collections.defaultdict(dict) for func in (value for key, value in globals().items() if meta.is_instruction(value)): instructions[func.instruction_name][func.instruction_type] = func return instructions def instruction(name, type, desc): """ Decorate a function to indicate that it is responsible for converting a python type to a Docker instruction. :param name: Name of docker instruction :param type: Type of python object it can convert :param desc: Short description of expected format for the python object. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): return '{} {}'.format(name, func(*args, **kwargs)) wrapper.instruction_name = name wrapper.instruction_type = type wrapper.instruction_desc = desc return wrapper return decorator
Add @instruction decorator to mark module level funcs as handlers.
Add @instruction decorator to mark module level funcs as handlers.
Python
apache-2.0
ahawker/ydf
python
## Code Before: __all__ = [] FROM = 'FROM' RUN = 'RUN' CMD = 'CMD' LABEL = 'LABEL' EXPOSE = 'EXPOSE' ENV = 'ENV' ADD = 'ADD' COPY = 'COPY' ENTRYPOINT = 'ENTRYPOINT' VOLUME = 'VOLUME' USER = 'USER' WORKDIR = 'WORKDIR' ARG = 'ARG' ONBUILD = 'ONBUILD' STOPSIGNAL = 'STOPSIGNAL' HEALTHCHECK = 'HEALTHCHECK' SHELL = 'SHELL' ## Instruction: Add @instruction decorator to mark module level funcs as handlers. ## Code After: import collections import functools from ydf import meta __all__ = [] FROM = 'FROM' RUN = 'RUN' CMD = 'CMD' LABEL = 'LABEL' EXPOSE = 'EXPOSE' ENV = 'ENV' ADD = 'ADD' COPY = 'COPY' ENTRYPOINT = 'ENTRYPOINT' VOLUME = 'VOLUME' USER = 'USER' WORKDIR = 'WORKDIR' ARG = 'ARG' ONBUILD = 'ONBUILD' STOPSIGNAL = 'STOPSIGNAL' HEALTHCHECK = 'HEALTHCHECK' SHELL = 'SHELL' def get_instructions(): """ Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`. """ instructions = collections.defaultdict(dict) for func in (value for key, value in globals().items() if meta.is_instruction(value)): instructions[func.instruction_name][func.instruction_type] = func return instructions def instruction(name, type, desc): """ Decorate a function to indicate that it is responsible for converting a python type to a Docker instruction. :param name: Name of docker instruction :param type: Type of python object it can convert :param desc: Short description of expected format for the python object. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): return '{} {}'.format(name, func(*args, **kwargs)) wrapper.instruction_name = name wrapper.instruction_type = type wrapper.instruction_desc = desc return wrapper return decorator
# ... existing code ... import collections import functools from ydf import meta __all__ = [] # ... modified code ... STOPSIGNAL = 'STOPSIGNAL' HEALTHCHECK = 'HEALTHCHECK' SHELL = 'SHELL' def get_instructions(): """ Get all functions within this module that are decorated with :func:`~ydf.instructions.instruction`. """ instructions = collections.defaultdict(dict) for func in (value for key, value in globals().items() if meta.is_instruction(value)): instructions[func.instruction_name][func.instruction_type] = func return instructions def instruction(name, type, desc): """ Decorate a function to indicate that it is responsible for converting a python type to a Docker instruction. :param name: Name of docker instruction :param type: Type of python object it can convert :param desc: Short description of expected format for the python object. """ def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): return '{} {}'.format(name, func(*args, **kwargs)) wrapper.instruction_name = name wrapper.instruction_type = type wrapper.instruction_desc = desc return wrapper return decorator # ... rest of the code ...
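A hypothetical converter written against the decorator above (the name/type/desc values are invented for illustration; the package's real converters live elsewhere):

@instruction(name=RUN, type=str, desc='shell command given as a plain string')
def run_from_str(value):
    return value

print(run_from_str('apt-get update'))   # -> 'RUN apt-get update'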
cf7086620df23d8af15f7c9898edf39f64965549
dbaas/workflow/steps/util/region_migration/check_instances_status.py
dbaas/workflow/steps/util/region_migration/check_instances_status.py
import logging from util import full_stack from workflow.steps.util.base import BaseStep from workflow.exceptions.error_codes import DBAAS_0020 LOG = logging.getLogger(__name__) class DecreaseTTL(BaseStep): def __unicode__(self): return "Checking instances status..." def do(self, workflow_dict): try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False def undo(self, workflow_dict): LOG.info("Running undo...") try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False
import logging from util import full_stack from workflow.steps.util.base import BaseStep from workflow.exceptions.error_codes import DBAAS_0020 from drivers.base import ConnectionError LOG = logging.getLogger(__name__) class CheckInstancesStatus(BaseStep): def __unicode__(self): return "Checking instances status..." def do(self, workflow_dict): try: databaseinfra = workflow_dict['databaseinfra'] driver = databaseinfra.get_driver() for instance in driver.get_database_instances(): msg = "Instance({}) is down".format(instance) exception_msg = Exception(msg) try: status = driver.check_status(instance) except ConnectionError: raise exception_msg else: if status is False: raise exception_msg return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False def undo(self, workflow_dict): LOG.info("Running undo...") try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False
Add step to check instances status
Add step to check instances status
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
python
## Code Before: import logging from util import full_stack from workflow.steps.util.base import BaseStep from workflow.exceptions.error_codes import DBAAS_0020 LOG = logging.getLogger(__name__) class DecreaseTTL(BaseStep): def __unicode__(self): return "Checking instances status..." def do(self, workflow_dict): try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False def undo(self, workflow_dict): LOG.info("Running undo...") try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False ## Instruction: Add step to check instances status ## Code After: import logging from util import full_stack from workflow.steps.util.base import BaseStep from workflow.exceptions.error_codes import DBAAS_0020 from drivers.base import ConnectionError LOG = logging.getLogger(__name__) class CheckInstancesStatus(BaseStep): def __unicode__(self): return "Checking instances status..." def do(self, workflow_dict): try: databaseinfra = workflow_dict['databaseinfra'] driver = databaseinfra.get_driver() for instance in driver.get_database_instances(): msg = "Instance({}) is down".format(instance) exception_msg = Exception(msg) try: status = driver.check_status(instance) except ConnectionError: raise exception_msg else: if status is False: raise exception_msg return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False def undo(self, workflow_dict): LOG.info("Running undo...") try: return True except Exception: traceback = full_stack() workflow_dict['exceptions']['error_codes'].append(DBAAS_0020) workflow_dict['exceptions']['traceback'].append(traceback) return False
... from util import full_stack from workflow.steps.util.base import BaseStep from workflow.exceptions.error_codes import DBAAS_0020 from drivers.base import ConnectionError LOG = logging.getLogger(__name__) class CheckInstancesStatus(BaseStep): def __unicode__(self): return "Checking instances status..." ... def do(self, workflow_dict): try: databaseinfra = workflow_dict['databaseinfra'] driver = databaseinfra.get_driver() for instance in driver.get_database_instances(): msg = "Instance({}) is down".format(instance) exception_msg = Exception(msg) try: status = driver.check_status(instance) except ConnectionError: raise exception_msg else: if status is False: raise exception_msg return True except Exception: traceback = full_stack() ...
f651d51d97b75f12ba68f1cbfca914724136d121
tools/halide_image.h
tools/halide_image.h
/* This allows code that relied on halide_image.h and Halide::Tools::Image to continue to work with newer versions of Halide where HalideBuffer.h and Halide::Buffer are the way to work with data. Besides mapping Halide::Tools::Image to Halide::Buffer, it defines USING_HALIDE_BUFFER to allow code to conditionally compile for one or the other. It is intended as a stop-gap measure until the code can be updated. */ #include "HalideBuffer.h" namespace Halide { namespace Tools { #define USING_HALIDE_BUFFER template< typename T > using Image = Buffer<T>; } // namespace Tools } // mamespace Halide #endif // #ifndef HALIDE_TOOLS_IMAGE_H
/** \file * * This allows code that relied on halide_image.h and * Halide::Tools::Image to continue to work with newer versions of * Halide where HalideBuffer.h and Halide::Buffer are the way to work * with data. * * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines * USING_HALIDE_BUFFER to allow code to conditionally compile for one * or the other. * * It is intended as a stop-gap measure until the code can be updated. */ #include "HalideBuffer.h" namespace Halide { namespace Tools { #define USING_HALIDE_BUFFER template< typename T > using Image = Buffer<T>; } // namespace Tools } // mamespace Halide #endif // #ifndef HALIDE_TOOLS_IMAGE_H
Reformat comment into Doxygen comment for file.
Reformat comment into Doxygen comment for file.
C
mit
kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,kgnk/Halide,psuriana/Halide,psuriana/Halide,kgnk/Halide,psuriana/Halide,kgnk/Halide
c
## Code Before: /* This allows code that relied on halide_image.h and Halide::Tools::Image to continue to work with newer versions of Halide where HalideBuffer.h and Halide::Buffer are the way to work with data. Besides mapping Halide::Tools::Image to Halide::Buffer, it defines USING_HALIDE_BUFFER to allow code to conditionally compile for one or the other. It is intended as a stop-gap measure until the code can be updated. */ #include "HalideBuffer.h" namespace Halide { namespace Tools { #define USING_HALIDE_BUFFER template< typename T > using Image = Buffer<T>; } // namespace Tools } // mamespace Halide #endif // #ifndef HALIDE_TOOLS_IMAGE_H ## Instruction: Reformat comment into Doxygen comment for file. ## Code After: /** \file * * This allows code that relied on halide_image.h and * Halide::Tools::Image to continue to work with newer versions of * Halide where HalideBuffer.h and Halide::Buffer are the way to work * with data. * * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines * USING_HALIDE_BUFFER to allow code to conditionally compile for one * or the other. * * It is intended as a stop-gap measure until the code can be updated. */ #include "HalideBuffer.h" namespace Halide { namespace Tools { #define USING_HALIDE_BUFFER template< typename T > using Image = Buffer<T>; } // namespace Tools } // mamespace Halide #endif // #ifndef HALIDE_TOOLS_IMAGE_H
# ... existing code ... /** \file * * This allows code that relied on halide_image.h and * Halide::Tools::Image to continue to work with newer versions of * Halide where HalideBuffer.h and Halide::Buffer are the way to work * with data. * * Besides mapping Halide::Tools::Image to Halide::Buffer, it defines * USING_HALIDE_BUFFER to allow code to conditionally compile for one * or the other. * * It is intended as a stop-gap measure until the code can be updated. */ #include "HalideBuffer.h" # ... rest of the code ...
98b0eb3d492cb816db7ffa7ad062dde36a1feadf
tests/unit/test_gettext.py
tests/unit/test_gettext.py
import logging import unittest from openstack.common.gettextutils import _ LOG = logging.getLogger(__name__) class GettextTest(unittest.TestCase): def test_gettext_does_not_blow_up(self): LOG.info(_('test'))
import logging import testtools from openstack.common.gettextutils import _ LOG = logging.getLogger(__name__) class GettextTest(testtools.TestCase): def test_gettext_does_not_blow_up(self): LOG.info(_('test'))
Use testtools as test base class.
Use testtools as test base class. On the path to testr migration, we need to replace the unittest base classes with testtools. Replace tearDown with addCleanup, addCleanup is more resilient than tearDown. The fixtures library has excellent support for managing and cleaning tempfiles. Use it. Replace skip_ with testtools.skipTest Part of blueprint grizzly-testtools. Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98
Python
apache-2.0
varunarya10/oslo.i18n,openstack/oslo.i18n
python
## Code Before: import logging import unittest from openstack.common.gettextutils import _ LOG = logging.getLogger(__name__) class GettextTest(unittest.TestCase): def test_gettext_does_not_blow_up(self): LOG.info(_('test')) ## Instruction: Use testtools as test base class. On the path to testr migration, we need to replace the unittest base classes with testtools. Replace tearDown with addCleanup, addCleanup is more resilient than tearDown. The fixtures library has excellent support for managing and cleaning tempfiles. Use it. Replace skip_ with testtools.skipTest Part of blueprint grizzly-testtools. Change-Id: I45e11bbb1ff9b31f3278d3b016737dcb7850cd98 ## Code After: import logging import testtools from openstack.common.gettextutils import _ LOG = logging.getLogger(__name__) class GettextTest(testtools.TestCase): def test_gettext_does_not_blow_up(self): LOG.info(_('test'))
// ... existing code ... import logging import testtools from openstack.common.gettextutils import _ // ... modified code ... LOG = logging.getLogger(__name__) class GettextTest(testtools.TestCase): def test_gettext_does_not_blow_up(self): LOG.info(_('test')) // ... rest of the code ...
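The commit message for this record names several migration techniques — addCleanup instead of tearDown, the fixtures library for temporary files, and skipTest — that the one-line diff itself does not demonstrate. Below is only a minimal sketch of what those patterns typically look like, assuming testtools is installed; the environment-variable name and directory handling are illustrative and not taken from the oslo codebase:

```python
import os
import shutil
import tempfile

import testtools


class ExampleMigratedTest(testtools.TestCase):

    def setUp(self):
        super(ExampleMigratedTest, self).setUp()
        # addCleanup registers the teardown immediately, so it still runs even
        # if a later step of setUp fails -- this is why it replaces tearDown.
        self.tmpdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self.tmpdir, ignore_errors=True)
        # With the fixtures library installed, the two lines above would
        # typically collapse into: self.useFixture(fixtures.TempDir())

    def test_skip_instead_of_skip_underscore(self):
        # skipTest replaces the old skip_ helper from the unittest-based class.
        if "OPTIONAL_BACKEND_URL" not in os.environ:
            self.skipTest("OPTIONAL_BACKEND_URL is not configured")
        self.assertTrue(os.path.isdir(self.tmpdir))
```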
c3284516e8dc2c7fccfbf7e4bff46a66b4ad2f15
cref/evaluation/__init__.py
cref/evaluation/__init__.py
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 100 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } output_files = predict_fasta(fasta_file, output_dir, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } prediction_output = output_dir + str(run) os.mkdir(prediction_output) output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
Save output for every run
Save output for every run
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
python
## Code Before: import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 100 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } output_files = predict_fasta(fasta_file, output_dir, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds)) ## Instruction: Save output for every run ## Code After: import os import statistics from cref.structure import rmsd from cref.app.terminal import download_pdb, download_fasta, predict_fasta pdbs = ['1zdd', '1gab'] runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) for pdb in pdbs: output_dir = 'predictions/evaluation/{}/'.format(pdb) try: os.mkdir(output_dir) except FileExistsError as e: print(e) for fragment_size in fragment_sizes: fasta_file = output_dir + pdb + '.fasta' download_fasta(pdb, fasta_file) for n in number_of_clusters: rmsds = [] for run in range(runs): params = { 'pdb': pdb, 'fragment_size': fragment_size, 'number_of_clusters': n } prediction_output = output_dir + str(run) os.mkdir(prediction_output) output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), 'experimental_structure.pdb' ) experimental_structure = download_pdb(pdb, filepath) rmsds.append(rmsd(predicted_structure, experimental_structure)) print(pdb, fragment_size, n, statistics.mean(rmsds), statistics.pstdev(rmsds))
... pdbs = ['1zdd', '1gab'] runs = 5 fragment_sizes = range(5, 13, 2) number_of_clusters = range(4, 20, 1) ... } prediction_output = output_dir + str(run) os.mkdir(prediction_output) output_files = predict_fasta(fasta_file, prediction_output, params) predicted_structure = output_files[0] filepath = os.path.join( os.path.dirname(predicted_structure), ...
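The change in this record keeps the artifacts of every run by writing each prediction into its own numbered subdirectory instead of reusing a single output directory. A generic sketch of that pattern follows; the predict callable and paths are placeholders, not the cref API:

```python
import os


def run_many(predict, fasta_file, output_dir, params, runs=5):
    """Call predict() several times, giving every run its own output folder."""
    outputs = []
    for run in range(runs):
        run_dir = os.path.join(output_dir, str(run))
        os.makedirs(run_dir, exist_ok=True)  # one subdirectory per run
        outputs.append(predict(fasta_file, run_dir, params))
    return outputs
```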
4653b9f493d28a6beb88a97d3d396ec1c9288f53
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Mixer.py
Sketches/JT/Jam/library/trunk/Kamaelia/Apps/Jam/Audio/Mixer.py
import numpy import Axon class MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent): channels = 8 bufferSize = 1024 def __init__(self, **argd): super(MonoMixer, self).__init__(**argd) for i in range(self.channels): self.addInbox("in%i" % i) def main(self): while 1: output = numpy.zeros(self.bufferSize) for i in range(self.channels): if self.dataReady("in%i" % i): output += self.recv("in%i" % i) output /= self.channels self.send(output, "outbox") if not self.anyReady(): self.pause() yield 1
import numpy import Axon import time from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent class MonoMixer(SchedulingAdaptiveCommsComponent): channels = 8 bufferSize = 1024 sampleRate = 44100 def __init__(self, **argd): super(MonoMixer, self).__init__(**argd) for i in range(self.channels): self.addInbox("in%i" % i) self.period = float(self.bufferSize)/self.sampleRate self.lastSendTime = time.time() self.scheduleAbs("Send", self.lastSendTime + self.period) def main(self): while 1: if self.dataReady("event"): output = numpy.zeros(self.bufferSize) self.recv("event") for i in range(self.channels): if self.dataReady("in%i" % i): data = self.recv("in%i" % i) if data != None: output += data output /= self.channels self.send(output, "outbox") self.lastSendTime += self.period self.scheduleAbs("Send", self.lastSendTime + self.period) else: self.pause()
Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of its inputs.
Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of its inputs.
Python
apache-2.0
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
python
## Code Before: import numpy import Axon class MonoMixer(Axon.AdaptiveCommsComponent.AdaptiveCommsComponent): channels = 8 bufferSize = 1024 def __init__(self, **argd): super(MonoMixer, self).__init__(**argd) for i in range(self.channels): self.addInbox("in%i" % i) def main(self): while 1: output = numpy.zeros(self.bufferSize) for i in range(self.channels): if self.dataReady("in%i" % i): output += self.recv("in%i" % i) output /= self.channels self.send(output, "outbox") if not self.anyReady(): self.pause() yield 1 ## Instruction: Change the mixer to be a scheduled component, and stop it from sending unnecessary messages when it has only received data from a few of it's inputs. ## Code After: import numpy import Axon import time from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent class MonoMixer(SchedulingAdaptiveCommsComponent): channels = 8 bufferSize = 1024 sampleRate = 44100 def __init__(self, **argd): super(MonoMixer, self).__init__(**argd) for i in range(self.channels): self.addInbox("in%i" % i) self.period = float(self.bufferSize)/self.sampleRate self.lastSendTime = time.time() self.scheduleAbs("Send", self.lastSendTime + self.period) def main(self): while 1: if self.dataReady("event"): output = numpy.zeros(self.bufferSize) self.recv("event") for i in range(self.channels): if self.dataReady("in%i" % i): data = self.recv("in%i" % i) if data != None: output += data output /= self.channels self.send(output, "outbox") self.lastSendTime += self.period self.scheduleAbs("Send", self.lastSendTime + self.period) else: self.pause()
... import numpy import Axon import time from Axon.SchedulingComponent import SchedulingAdaptiveCommsComponent class MonoMixer(SchedulingAdaptiveCommsComponent): channels = 8 bufferSize = 1024 sampleRate = 44100 def __init__(self, **argd): super(MonoMixer, self).__init__(**argd) for i in range(self.channels): self.addInbox("in%i" % i) self.period = float(self.bufferSize)/self.sampleRate self.lastSendTime = time.time() self.scheduleAbs("Send", self.lastSendTime + self.period) def main(self): while 1: if self.dataReady("event"): output = numpy.zeros(self.bufferSize) self.recv("event") for i in range(self.channels): if self.dataReady("in%i" % i): data = self.recv("in%i" % i) if data != None: output += data output /= self.channels self.send(output, "outbox") self.lastSendTime += self.period self.scheduleAbs("Send", self.lastSendTime + self.period) else: self.pause() ...
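The rewritten mixer derives its wake-up period from the buffer size and sample rate and always schedules the next send at an absolute time (lastSendTime + period), so small timing errors do not accumulate. Outside of Kamaelia/Axon the same idea can be sketched in plain Python as below; this is an illustration of absolute-deadline scheduling, not the Axon scheduling API:

```python
import time


def run_periodic(callback, buffer_size=1024, sample_rate=44100, iterations=10):
    """Fire callback once per audio buffer, scheduling by absolute deadlines."""
    period = float(buffer_size) / sample_rate       # seconds per buffer
    next_deadline = time.monotonic() + period
    for _ in range(iterations):
        delay = next_deadline - time.monotonic()
        if delay > 0:
            time.sleep(delay)
        callback()
        next_deadline += period                     # absolute: no drift build-up


if __name__ == "__main__":
    run_periodic(lambda: None)
```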
b2155e167b559367bc24ba614f51360793951f12
mythril/support/source_support.py
mythril/support/source_support.py
from mythril.solidity.soliditycontract import SolidityContract from mythril.ethereum.evmcontract import EVMContract class Source: def __init__( self, source_type=None, source_format=None, source_list=None, meta=None ): self.source_type = source_type self.source_format = source_format self.source_list = [] self.meta = meta def get_source_from_contracts_list(self, contracts): if contracts is None or len(contracts) == 0: return if isinstance(contracts[0], SolidityContract): self.source_type = "solidity-file" self.source_format = "text" for contract in contracts: self.source_list += [file.filename for file in contract.solidity_files] elif isinstance(contracts[0], EVMContract): self.source_format = "evm-byzantium-bytecode" self.source_type = ( "raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address" ) for contract in contracts: self.source_list.append(contract.bytecode_hash) else: assert False # Fail hard self.meta = ""
from mythril.solidity.soliditycontract import SolidityContract from mythril.ethereum.evmcontract import EVMContract class Source: def __init__( self, source_type=None, source_format=None, source_list=None, meta=None ): self.source_type = source_type self.source_format = source_format self.source_list = [] self.meta = meta def get_source_from_contracts_list(self, contracts): if contracts is None or len(contracts) == 0: return if isinstance(contracts[0], SolidityContract): self.source_type = "solidity-file" self.source_format = "text" for contract in contracts: self.source_list += [file.filename for file in contract.solidity_files] elif isinstance(contracts[0], EVMContract): self.source_format = "evm-byzantium-bytecode" self.source_type = ( "raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address" ) for contract in contracts: self.source_list.append(contract.bytecode_hash) else: assert False # Fail hard
Remove meta from source class (belongs to issue not source)
Remove meta from source class (belongs to issue not source)
Python
mit
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
python
## Code Before: from mythril.solidity.soliditycontract import SolidityContract from mythril.ethereum.evmcontract import EVMContract class Source: def __init__( self, source_type=None, source_format=None, source_list=None, meta=None ): self.source_type = source_type self.source_format = source_format self.source_list = [] self.meta = meta def get_source_from_contracts_list(self, contracts): if contracts is None or len(contracts) == 0: return if isinstance(contracts[0], SolidityContract): self.source_type = "solidity-file" self.source_format = "text" for contract in contracts: self.source_list += [file.filename for file in contract.solidity_files] elif isinstance(contracts[0], EVMContract): self.source_format = "evm-byzantium-bytecode" self.source_type = ( "raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address" ) for contract in contracts: self.source_list.append(contract.bytecode_hash) else: assert False # Fail hard self.meta = "" ## Instruction: Remove meta from source class (belongs to issue not source) ## Code After: from mythril.solidity.soliditycontract import SolidityContract from mythril.ethereum.evmcontract import EVMContract class Source: def __init__( self, source_type=None, source_format=None, source_list=None, meta=None ): self.source_type = source_type self.source_format = source_format self.source_list = [] self.meta = meta def get_source_from_contracts_list(self, contracts): if contracts is None or len(contracts) == 0: return if isinstance(contracts[0], SolidityContract): self.source_type = "solidity-file" self.source_format = "text" for contract in contracts: self.source_list += [file.filename for file in contract.solidity_files] elif isinstance(contracts[0], EVMContract): self.source_format = "evm-byzantium-bytecode" self.source_type = ( "raw-bytecode" if contracts[0].name == "MAIN" else "ethereum-address" ) for contract in contracts: self.source_list.append(contract.bytecode_hash) else: assert False # Fail hard
... else: assert False # Fail hard ...
c057f4865052c893af9abcae2c2f37ec02d56118
example_test_set/tests/test_set_root.py
example_test_set/tests/test_set_root.py
import pytest class Dut(object): 'fake a device under test' _allowed = ('a', 'b', 'c') def __init__(self, mode=None): self._mode = mode def get_mode(self): return self._mode def set_mode(self, val): self._mode = val def check_mode(self): assert self._mode in self._allowed # fixtures @pytest.fixture def dut(request): return Dut('c') @pytest.yield_fixture(params=('a', 'b', 'c')) def mode(request, dut): orig_mode = dut.get_mode() dut.set_mode(request.param) yield dut dut.set_mode(orig_mode) @pytest.yield_fixture(params=[1, 2, 3]) def inputs(request): yield request.param def test_modes(mode): assert mode.check_mode() def test_inputs(inputs): assert inputs < 2 class TestBoth(object): def test_m(self, mode, inputs): assert mode.check_mode() assert inputs < 2
import pytest class Dut(object): 'fake a device under test' _allowed = ('a', 'b', 'c') def __init__(self, mode=None): self._mode = mode def get_mode(self): return self._mode def set_mode(self, val): self._mode = val def check_mode(self): assert self._mode in self._allowed # fixtures @pytest.fixture def dut(request): return Dut('c') @pytest.yield_fixture(params=('a', 'b', 'c')) def mode(request, dut): orig_mode = dut.get_mode() dut.set_mode(request.param) yield dut dut.set_mode(orig_mode) @pytest.yield_fixture(params=['dog', 'cat', 'mouse']) def inputs(request): yield request.param def test_modes(mode): assert mode.check_mode() def test_inputs(inputs): assert inputs < 2 class TestBoth(object): def test_m(self, mode, inputs): assert mode.check_mode() assert inputs < 2
Tweak some example fixture ids
Tweak some example fixture ids
Python
mit
tgoodlet/pytest-interactive
python
## Code Before: import pytest class Dut(object): 'fake a device under test' _allowed = ('a', 'b', 'c') def __init__(self, mode=None): self._mode = mode def get_mode(self): return self._mode def set_mode(self, val): self._mode = val def check_mode(self): assert self._mode in self._allowed # fixtures @pytest.fixture def dut(request): return Dut('c') @pytest.yield_fixture(params=('a', 'b', 'c')) def mode(request, dut): orig_mode = dut.get_mode() dut.set_mode(request.param) yield dut dut.set_mode(orig_mode) @pytest.yield_fixture(params=[1, 2, 3]) def inputs(request): yield request.param def test_modes(mode): assert mode.check_mode() def test_inputs(inputs): assert inputs < 2 class TestBoth(object): def test_m(self, mode, inputs): assert mode.check_mode() assert inputs < 2 ## Instruction: Tweak some example fixture ids ## Code After: import pytest class Dut(object): 'fake a device under test' _allowed = ('a', 'b', 'c') def __init__(self, mode=None): self._mode = mode def get_mode(self): return self._mode def set_mode(self, val): self._mode = val def check_mode(self): assert self._mode in self._allowed # fixtures @pytest.fixture def dut(request): return Dut('c') @pytest.yield_fixture(params=('a', 'b', 'c')) def mode(request, dut): orig_mode = dut.get_mode() dut.set_mode(request.param) yield dut dut.set_mode(orig_mode) @pytest.yield_fixture(params=['dog', 'cat', 'mouse']) def inputs(request): yield request.param def test_modes(mode): assert mode.check_mode() def test_inputs(inputs): assert inputs < 2 class TestBoth(object): def test_m(self, mode, inputs): assert mode.check_mode() assert inputs < 2
... dut.set_mode(orig_mode) @pytest.yield_fixture(params=['dog', 'cat', 'mouse']) def inputs(request): yield request.param ...
5a885124432ccb33d180a8e73c753ceab54ffdf5
src/Itemizers.py
src/Itemizers.py
from Foundation import objc from Foundation import NSBundle from AppKit import NSImage def iconForName(klass, name): """Return the NSImage instance representing a `name` item.""" imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png') img = NSImage.alloc().initWithContentsOfFile_(imgpath) img.autorelease() return img class HaskellModuleItem(objc.lookUpClass('ESBaseItem')): """Itemizer for modules""" def isDecorator(self): return True def image(self): return iconForName(self.class__(), 'module') class HaskellTypeItem(objc.lookUpClass('ESBaseItem')): """Itemizer for datatypes""" def isDecorator(self): return True def image(self): return iconForName(self.class__(), 'type') def isTextualizer(self): return True def title(self): return self.text().lstrip() class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')): """Itemizer for functions""" pass class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')): """Itemizer for code blocks""" def isTextualizer(self): return True def title(self): return '%s %s' % (u'{…}', self.text().lstrip())
from Foundation import objc from Foundation import NSBundle from AppKit import NSImage haskellBundleIdentifier = 'org.purl.net.mkhl.haskell' def iconForName(name): """Return the NSImage instance representing a `name` item.""" bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier) imgpath = bundle.pathForResource_ofType_(name, 'png') img = NSImage.alloc().initWithContentsOfFile_(imgpath) img.autorelease() return img class HaskellModuleItem(objc.lookUpClass('ESBaseItem')): """Itemizer for modules""" def isDecorator(self): return True def image(self): return iconForName('module') class HaskellTypeItem(objc.lookUpClass('ESBaseItem')): """Itemizer for datatypes""" def isDecorator(self): return True def image(self): return iconForName('type') def isTextualizer(self): return True def title(self): return self.text().lstrip() class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')): """Itemizer for functions""" pass class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')): """Itemizer for code blocks""" def isTextualizer(self): return True def title(self): return '%s %s' % (u'{…}', self.text().lstrip())
Simplify the icon finder function.
Simplify the icon finder function. We statically know our bundle identifier, so we don’t have to find the bundle by runtime class.
Python
mit
mkhl/haskell.sugar
python
## Code Before: from Foundation import objc from Foundation import NSBundle from AppKit import NSImage def iconForName(klass, name): """Return the NSImage instance representing a `name` item.""" imgpath = NSBundle.bundleForClass_(klass).pathForResource_ofType_(name, 'png') img = NSImage.alloc().initWithContentsOfFile_(imgpath) img.autorelease() return img class HaskellModuleItem(objc.lookUpClass('ESBaseItem')): """Itemizer for modules""" def isDecorator(self): return True def image(self): return iconForName(self.class__(), 'module') class HaskellTypeItem(objc.lookUpClass('ESBaseItem')): """Itemizer for datatypes""" def isDecorator(self): return True def image(self): return iconForName(self.class__(), 'type') def isTextualizer(self): return True def title(self): return self.text().lstrip() class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')): """Itemizer for functions""" pass class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')): """Itemizer for code blocks""" def isTextualizer(self): return True def title(self): return '%s %s' % (u'{…}', self.text().lstrip()) ## Instruction: Simplify the icon finder function. We statically know our bundle identifier, so we don’t have too find the bundle by runtime class. ## Code After: from Foundation import objc from Foundation import NSBundle from AppKit import NSImage haskellBundleIdentifier = 'org.purl.net.mkhl.haskell' def iconForName(name): """Return the NSImage instance representing a `name` item.""" bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier) imgpath = bundle.pathForResource_ofType_(name, 'png') img = NSImage.alloc().initWithContentsOfFile_(imgpath) img.autorelease() return img class HaskellModuleItem(objc.lookUpClass('ESBaseItem')): """Itemizer for modules""" def isDecorator(self): return True def image(self): return iconForName('module') class HaskellTypeItem(objc.lookUpClass('ESBaseItem')): """Itemizer for datatypes""" def isDecorator(self): return True def image(self): return iconForName('type') def isTextualizer(self): return True def title(self): return self.text().lstrip() class HaskellFunctionItem(objc.lookUpClass('ESBaseItem')): """Itemizer for functions""" pass class HaskellCodeBlockItem(objc.lookUpClass('ESCodeBlockItem')): """Itemizer for code blocks""" def isTextualizer(self): return True def title(self): return '%s %s' % (u'{…}', self.text().lstrip())
// ... existing code ... from Foundation import NSBundle from AppKit import NSImage haskellBundleIdentifier = 'org.purl.net.mkhl.haskell' def iconForName(name): """Return the NSImage instance representing a `name` item.""" bundle = NSBundle.bundleWithIdentifier_(haskellBundleIdentifier) imgpath = bundle.pathForResource_ofType_(name, 'png') img = NSImage.alloc().initWithContentsOfFile_(imgpath) img.autorelease() return img // ... modified code ... return True def image(self): return iconForName('module') class HaskellTypeItem(objc.lookUpClass('ESBaseItem')): ... return True def image(self): return iconForName('type') def isTextualizer(self): return True // ... rest of the code ...
44d20ecaf13cb0245ee562d234939e762b5b0921
include/agent.h
include/agent.h
// forward declaration namespace Url { class Url; } namespace Rep { class Agent { public: /* The type for the delay. */ typedef float delay_t; /** * Construct an agent. */ explicit Agent(const std::string& host) : directives_(), delay_(-1.0), sorted_(true), host_(host) {} /** * Add an allowed directive. */ Agent& allow(const std::string& query); /** * Add a disallowed directive. */ Agent& disallow(const std::string& query); /** * Set the delay for this agent. */ Agent& delay(delay_t value) { delay_ = value; return *this; } /** * Return the delay for this agent. */ delay_t delay() const { return delay_; } /** * A vector of the directives, in priority-sorted order. */ const std::vector<Directive>& directives() const; /** * Return true if the URL (either a full URL or a path) is allowed. */ bool allowed(const std::string& path) const; std::string str() const; private: bool is_external(const Url::Url& url) const; mutable std::vector<Directive> directives_; delay_t delay_; mutable bool sorted_; std::string host_; }; } #endif
// forward declaration namespace Url { class Url; } namespace Rep { class Agent { public: /* The type for the delay. */ typedef float delay_t; /** * Default constructor */ Agent() : Agent("") {} /** * Construct an agent. */ explicit Agent(const std::string& host) : directives_(), delay_(-1.0), sorted_(true), host_(host) {} /** * Add an allowed directive. */ Agent& allow(const std::string& query); /** * Add a disallowed directive. */ Agent& disallow(const std::string& query); /** * Set the delay for this agent. */ Agent& delay(delay_t value) { delay_ = value; return *this; } /** * Return the delay for this agent. */ delay_t delay() const { return delay_; } /** * A vector of the directives, in priority-sorted order. */ const std::vector<Directive>& directives() const; /** * Return true if the URL (either a full URL or a path) is allowed. */ bool allowed(const std::string& path) const; std::string str() const; private: bool is_external(const Url::Url& url) const; mutable std::vector<Directive> directives_; delay_t delay_; mutable bool sorted_; std::string host_; }; } #endif
Add back default constructor for Agent.
Add back default constructor for Agent. Previously, this was removed in #28, but the Cython bindings in reppy *really* want there to be a default constructor, so I'm adding it back for convenience.
C
mit
seomoz/rep-cpp,seomoz/rep-cpp
c
## Code Before: // forward declaration namespace Url { class Url; } namespace Rep { class Agent { public: /* The type for the delay. */ typedef float delay_t; /** * Construct an agent. */ explicit Agent(const std::string& host) : directives_(), delay_(-1.0), sorted_(true), host_(host) {} /** * Add an allowed directive. */ Agent& allow(const std::string& query); /** * Add a disallowed directive. */ Agent& disallow(const std::string& query); /** * Set the delay for this agent. */ Agent& delay(delay_t value) { delay_ = value; return *this; } /** * Return the delay for this agent. */ delay_t delay() const { return delay_; } /** * A vector of the directives, in priority-sorted order. */ const std::vector<Directive>& directives() const; /** * Return true if the URL (either a full URL or a path) is allowed. */ bool allowed(const std::string& path) const; std::string str() const; private: bool is_external(const Url::Url& url) const; mutable std::vector<Directive> directives_; delay_t delay_; mutable bool sorted_; std::string host_; }; } #endif ## Instruction: Add back default constructor for Agent. Previously, this was removed in #28, but the Cython bindings in reppy *really* want there to be a default constructor, so I'm adding it back for convenience. ## Code After: // forward declaration namespace Url { class Url; } namespace Rep { class Agent { public: /* The type for the delay. */ typedef float delay_t; /** * Default constructor */ Agent() : Agent("") {} /** * Construct an agent. */ explicit Agent(const std::string& host) : directives_(), delay_(-1.0), sorted_(true), host_(host) {} /** * Add an allowed directive. */ Agent& allow(const std::string& query); /** * Add a disallowed directive. */ Agent& disallow(const std::string& query); /** * Set the delay for this agent. */ Agent& delay(delay_t value) { delay_ = value; return *this; } /** * Return the delay for this agent. */ delay_t delay() const { return delay_; } /** * A vector of the directives, in priority-sorted order. */ const std::vector<Directive>& directives() const; /** * Return true if the URL (either a full URL or a path) is allowed. */ bool allowed(const std::string& path) const; std::string str() const; private: bool is_external(const Url::Url& url) const; mutable std::vector<Directive> directives_; delay_t delay_; mutable bool sorted_; std::string host_; }; } #endif
# ... existing code ... public: /* The type for the delay. */ typedef float delay_t; /** * Default constructor */ Agent() : Agent("") {} /** * Construct an agent. # ... rest of the code ...
1a71fba6224a9757f19e702a3b9a1cebf496a754
src/loop+blkback/plugin.py
src/loop+blkback/plugin.py
import os import sys import xapi import xapi.plugin from xapi.storage.datapath import log class Implementation(xapi.plugin.Plugin_skeleton): def query(self, dbg): return { "plugin": "loopdev+blkback", "name": "The loopdev+blkback kernel-space datapath plugin", "description": ("This plugin manages and configures loop" " devices which can be connected to VMs" " directly via kernel-space blkback"), "vendor": "Citrix", "copyright": "(C) 2015 Citrix Inc", "version": "3.0", "required_api_version": "3.0", "features": [ ], "configuration": {}, "required_cluster_stack": []} if __name__ == "__main__": log.log_call_argv() cmd = xapi.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == "Plugin.Query": cmd.query() else: raise xapi.plugin.Unimplemented(base)
import os import sys import xapi import xapi.storage.api.plugin from xapi.storage import log class Implementation(xapi.storage.api.plugin.Plugin_skeleton): def query(self, dbg): return { "plugin": "loopdev+blkback", "name": "The loopdev+blkback kernel-space datapath plugin", "description": ("This plugin manages and configures loop" " devices which can be connected to VMs" " directly via kernel-space blkback"), "vendor": "Citrix", "copyright": "(C) 2015 Citrix Inc", "version": "3.0", "required_api_version": "3.0", "features": [ ], "configuration": {}, "required_cluster_stack": []} if __name__ == "__main__": log.log_call_argv() cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == "Plugin.Query": cmd.query() else: raise xapi.storage.api.plugin.Unimplemented(base)
Use the new xapi.storage package hierarchy
Use the new xapi.storage package hierarchy Signed-off-by: David Scott <[email protected]>
Python
lgpl-2.1
jjd27/xapi-storage-datapath-plugins,robertbreker/xapi-storage-datapath-plugins,djs55/xapi-storage-datapath-plugins,xapi-project/xapi-storage-datapath-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins,stefanopanella/xapi-storage-plugins
python
## Code Before: import os import sys import xapi import xapi.plugin from xapi.storage.datapath import log class Implementation(xapi.plugin.Plugin_skeleton): def query(self, dbg): return { "plugin": "loopdev+blkback", "name": "The loopdev+blkback kernel-space datapath plugin", "description": ("This plugin manages and configures loop" " devices which can be connected to VMs" " directly via kernel-space blkback"), "vendor": "Citrix", "copyright": "(C) 2015 Citrix Inc", "version": "3.0", "required_api_version": "3.0", "features": [ ], "configuration": {}, "required_cluster_stack": []} if __name__ == "__main__": log.log_call_argv() cmd = xapi.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == "Plugin.Query": cmd.query() else: raise xapi.plugin.Unimplemented(base) ## Instruction: Use the new xapi.storage package hierarchy Signed-off-by: David Scott <[email protected]> ## Code After: import os import sys import xapi import xapi.storage.api.plugin from xapi.storage import log class Implementation(xapi.storage.api.plugin.Plugin_skeleton): def query(self, dbg): return { "plugin": "loopdev+blkback", "name": "The loopdev+blkback kernel-space datapath plugin", "description": ("This plugin manages and configures loop" " devices which can be connected to VMs" " directly via kernel-space blkback"), "vendor": "Citrix", "copyright": "(C) 2015 Citrix Inc", "version": "3.0", "required_api_version": "3.0", "features": [ ], "configuration": {}, "required_cluster_stack": []} if __name__ == "__main__": log.log_call_argv() cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == "Plugin.Query": cmd.query() else: raise xapi.storage.api.plugin.Unimplemented(base)
# ... existing code ... import os import sys import xapi import xapi.storage.api.plugin from xapi.storage import log class Implementation(xapi.storage.api.plugin.Plugin_skeleton): def query(self, dbg): return { # ... modified code ... if __name__ == "__main__": log.log_call_argv() cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == "Plugin.Query": cmd.query() else: raise xapi.storage.api.plugin.Unimplemented(base) # ... rest of the code ...
fc7db2a55ad3f612ac6ef01cfa57ce03040708a5
evelink/__init__.py
evelink/__init__.py
"""EVELink - Python bindings for the EVE API.""" import logging from evelink import account from evelink import api from evelink import char from evelink import constants from evelink import corp from evelink import eve from evelink import map from evelink import parsing from evelink import server # Implement NullHandler because it was only added in Python 2.7+. class NullHandler(logging.Handler): def emit(self, record): pass # Create a logger, but by default, have it do nothing _log = logging.getLogger('evelink') _log.addHandler(NullHandler()) __all__ = [ "account", "api", "char", "constants", "corp", "eve", "map", "parsing", "server", ]
"""EVELink - Python bindings for the EVE API.""" import logging from evelink import account from evelink import api from evelink import char from evelink import constants from evelink import corp from evelink import eve from evelink import map from evelink import server # Implement NullHandler because it was only added in Python 2.7+. class NullHandler(logging.Handler): def emit(self, record): pass # Create a logger, but by default, have it do nothing _log = logging.getLogger('evelink') _log.addHandler(NullHandler()) __all__ = [ "account", "api", "char", "constants", "corp", "eve", "map", "parsing", "server", ]
Remove parsing from public interface
Remove parsing from public interface
Python
mit
zigdon/evelink,FashtimeDotCom/evelink,bastianh/evelink,ayust/evelink,Morloth1274/EVE-Online-POCO-manager
python
## Code Before: """EVELink - Python bindings for the EVE API.""" import logging from evelink import account from evelink import api from evelink import char from evelink import constants from evelink import corp from evelink import eve from evelink import map from evelink import parsing from evelink import server # Implement NullHandler because it was only added in Python 2.7+. class NullHandler(logging.Handler): def emit(self, record): pass # Create a logger, but by default, have it do nothing _log = logging.getLogger('evelink') _log.addHandler(NullHandler()) __all__ = [ "account", "api", "char", "constants", "corp", "eve", "map", "parsing", "server", ] ## Instruction: Remove parsing from public interface ## Code After: """EVELink - Python bindings for the EVE API.""" import logging from evelink import account from evelink import api from evelink import char from evelink import constants from evelink import corp from evelink import eve from evelink import map from evelink import server # Implement NullHandler because it was only added in Python 2.7+. class NullHandler(logging.Handler): def emit(self, record): pass # Create a logger, but by default, have it do nothing _log = logging.getLogger('evelink') _log.addHandler(NullHandler()) __all__ = [ "account", "api", "char", "constants", "corp", "eve", "map", "parsing", "server", ]
# ... existing code ... from evelink import corp from evelink import eve from evelink import map from evelink import server # Implement NullHandler because it was only added in Python 2.7+. # ... rest of the code ...
493637ace6881defedee22971f3bc39fe9a5bd0a
freesas/test/__init__.py
freesas/test/__init__.py
__author__ = "Jérôme Kieffer" __license__ = "MIT" __date__ = "05/09/2017" __copyright__ = "2015, ESRF" import unittest from .test_all import suite def run(): runner = unittest.TextTestRunner() return runner.run(suite()) if __name__ == '__main__': run()
__author__ = "Jérôme Kieffer" __license__ = "MIT" __date__ = "15/01/2021" __copyright__ = "2015-2021, ESRF" import sys import unittest from .test_all import suite def run_tests(): """Run test complete test_suite""" mysuite = suite() runner = unittest.TextTestRunner() if not runner.run(mysuite).wasSuccessful(): print("Test suite failed") return 1 else: print("Test suite succeeded") return 0 run = run_tests if __name__ == '__main__': sys.exit(run_tests())
Make it compatible with Bob
Make it compatible with Bob
Python
mit
kif/freesas,kif/freesas,kif/freesas
python
## Code Before: __author__ = "Jérôme Kieffer" __license__ = "MIT" __date__ = "05/09/2017" __copyright__ = "2015, ESRF" import unittest from .test_all import suite def run(): runner = unittest.TextTestRunner() return runner.run(suite()) if __name__ == '__main__': run() ## Instruction: Make it compatible with Bob ## Code After: __author__ = "Jérôme Kieffer" __license__ = "MIT" __date__ = "15/01/2021" __copyright__ = "2015-2021, ESRF" import sys import unittest from .test_all import suite def run_tests(): """Run test complete test_suite""" mysuite = suite() runner = unittest.TextTestRunner() if not runner.run(mysuite).wasSuccessful(): print("Test suite failed") return 1 else: print("Test suite succeeded") return 0 run = run_tests if __name__ == '__main__': sys.exit(run_tests())
// ... existing code ... __author__ = "Jérôme Kieffer" __license__ = "MIT" __date__ = "15/01/2021" __copyright__ = "2015-2021, ESRF" import sys import unittest from .test_all import suite def run_tests(): """Run test complete test_suite""" mysuite = suite() runner = unittest.TextTestRunner() if not runner.run(mysuite).wasSuccessful(): print("Test suite failed") return 1 else: print("Test suite succeeded") return 0 run = run_tests if __name__ == '__main__': sys.exit(run_tests()) // ... rest of the code ...
e4a7139aaf7abb3df49079e9fbe150917a0225dc
src/main/java/seedu/jimi/commons/events/ui/ShowTaskPanelSectionEvent.java
src/main/java/seedu/jimi/commons/events/ui/ShowTaskPanelSectionEvent.java
package seedu.jimi.commons.events.ui; import seedu.jimi.commons.events.BaseEvent; /** * Indicates user request to show a section of the taskList panel. * @author zexuan * */ public class ShowTaskPanelSectionEvent extends BaseEvent{ String sectionToDisplay; public ShowTaskPanelSectionEvent(String sectionToDisplay) { this.sectionToDisplay = sectionToDisplay; } @Override public String toString() { return this.getClass().getSimpleName(); } }
package seedu.jimi.commons.events.ui; import seedu.jimi.commons.events.BaseEvent; /** * Indicates user request to show a section of the taskList panel. * @author zexuan * */ public class ShowTaskPanelSectionEvent extends BaseEvent{ public final String sectionToDisplay; public ShowTaskPanelSectionEvent(String sectionToDisplay) { this.sectionToDisplay = sectionToDisplay; } @Override public String toString() { return this.getClass().getSimpleName(); } }
Modify access level of member var
Modify access level of member var
Java
mit
CS2103AUG2016-T09-C2/main,CS2103AUG2016-T09-C2/main
java
## Code Before: package seedu.jimi.commons.events.ui; import seedu.jimi.commons.events.BaseEvent; /** * Indicates user request to show a section of the taskList panel. * @author zexuan * */ public class ShowTaskPanelSectionEvent extends BaseEvent{ String sectionToDisplay; public ShowTaskPanelSectionEvent(String sectionToDisplay) { this.sectionToDisplay = sectionToDisplay; } @Override public String toString() { return this.getClass().getSimpleName(); } } ## Instruction: Modify access level of member var ## Code After: package seedu.jimi.commons.events.ui; import seedu.jimi.commons.events.BaseEvent; /** * Indicates user request to show a section of the taskList panel. * @author zexuan * */ public class ShowTaskPanelSectionEvent extends BaseEvent{ public final String sectionToDisplay; public ShowTaskPanelSectionEvent(String sectionToDisplay) { this.sectionToDisplay = sectionToDisplay; } @Override public String toString() { return this.getClass().getSimpleName(); } }
... */ public class ShowTaskPanelSectionEvent extends BaseEvent{ public final String sectionToDisplay; public ShowTaskPanelSectionEvent(String sectionToDisplay) { this.sectionToDisplay = sectionToDisplay; ...
fbadf23356b40c36378cef8f3a9c8b382bce9e32
comics/core/admin.py
comics/core/admin.py
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
Include start date, end date, and active flag in comics list
Include start date, end date, and active flag in comics list
Python
agpl-3.0
jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics
python
## Code Before: from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin) ## Instruction: Include start date, end date, and active flag in comics list ## Code After: from django.contrib import admin from comics.core import models class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } class ReleaseAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'comic', 'pub_date', 'fetched') list_filter = ['pub_date', 'fetched', 'comic'] date_hierarchy = 'pub_date' exclude = ('images',) class ImageAdmin(admin.ModelAdmin): list_display = ('__unicode__', 'file', 'height', 'width', 'fetched', 'title', 'text') list_filter = ['fetched', 'comic'] date_hierarchy = 'fetched' admin.site.register(models.Comic, ComicAdmin) admin.site.register(models.Release, ReleaseAdmin) admin.site.register(models.Image, ImageAdmin)
// ... existing code ... class ComicAdmin(admin.ModelAdmin): list_display = ('slug', 'name', 'language', 'url', 'rights', 'start_date', 'end_date', 'active') prepopulated_fields = { 'slug': ('name',) } // ... rest of the code ...
c6a161b5c0fa3d76b09b34dfab8f057e8b10bce2
tests/test_extensions.py
tests/test_extensions.py
import unittest class TestExtensions(unittest.TestCase): def test_import_extension(self): import pybel.ext.test assert pybel.ext.test.an_extension_function() == 42 def test_import_extension_2(self): from pybel.ext.test import an_extension_function assert an_extension_function() == 42 def test_import_extension_3(self): from pybel.ext import test assert test.an_extension_function() == 42
import unittest class TestExtensions(unittest.TestCase): def test_import_extension(self): import pybel.ext.test assert pybel.ext.test.an_extension_function() == 42 def test_import_extension_2(self): from pybel.ext.test import an_extension_function assert an_extension_function() == 42 def test_import_extension_3(self): from pybel.ext import test assert test.an_extension_function() == 42 def test_import_extension_4(self): with self.assertRaises(ImportError): from pybel.ext import not_an_extension
Add a test for importing a nonexistent extension
Add a test for importing a nonexistent extension
Python
mit
pybel/pybel,pybel/pybel,pybel/pybel
python
## Code Before: import unittest class TestExtensions(unittest.TestCase): def test_import_extension(self): import pybel.ext.test assert pybel.ext.test.an_extension_function() == 42 def test_import_extension_2(self): from pybel.ext.test import an_extension_function assert an_extension_function() == 42 def test_import_extension_3(self): from pybel.ext import test assert test.an_extension_function() == 42 ## Instruction: Add a test for importing a nonexistent extension ## Code After: import unittest class TestExtensions(unittest.TestCase): def test_import_extension(self): import pybel.ext.test assert pybel.ext.test.an_extension_function() == 42 def test_import_extension_2(self): from pybel.ext.test import an_extension_function assert an_extension_function() == 42 def test_import_extension_3(self): from pybel.ext import test assert test.an_extension_function() == 42 def test_import_extension_4(self): with self.assertRaises(ImportError): from pybel.ext import not_an_extension
... from pybel.ext import test assert test.an_extension_function() == 42 def test_import_extension_4(self): with self.assertRaises(ImportError): from pybel.ext import not_an_extension ...
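The added test leans on assertRaises working as a context manager: the ImportError surfaces when the import statement executes inside the with block and is caught by the assertion. A standalone sketch of the same pattern, using an obviously nonexistent module name rather than the real pybel package:

```python
import importlib
import unittest


class MissingExtensionTest(unittest.TestCase):

    def test_missing_module_raises_import_error(self):
        # The import runs inside the with block, so the ImportError it raises
        # is caught and turned into a passing assertion.
        with self.assertRaises(ImportError):
            importlib.import_module("no_such_package.no_such_extension")


if __name__ == "__main__":
    unittest.main()
```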
bf007267246bd317dc3ccad9f5cf8a9f452b3e0b
firecares/utils/__init__.py
firecares/utils/__init__.py
from django.core.files.storage import get_storage_class from storages.backends.s3boto import S3BotoStorage from PIL import Image def convert_png_to_jpg(img): """ Converts a png to a jpg. :param img: Absolute path to the image. :returns: the filename """ im = Image.open(img) bg = Image.new("RGB", im.size, (255, 255, 255)) bg.paste(im, im) filename = img.replace('png', 'jpg') bg.save(filename, quality=85) return filename class CachedS3BotoStorage(S3BotoStorage): """ S3 storage backend that saves the files locally, too. """ def __init__(self, *args, **kwargs): super(CachedS3BotoStorage, self).__init__(*args, **kwargs) self.local_storage = get_storage_class( "compressor.storage.CompressorFileStorage")() def save(self, name, content): name = super(CachedS3BotoStorage, self).save(name, content) self.local_storage._save(name, content) return name def dictfetchall(cursor): """ Returns all rows from a cursor as a dict """ desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ]
from django.core.files.storage import get_storage_class from storages.backends.s3boto import S3BotoStorage from PIL import Image class CachedS3BotoStorage(S3BotoStorage): """ S3 storage backend that saves the files locally, too. """ def __init__(self, *args, **kwargs): super(CachedS3BotoStorage, self).__init__(*args, **kwargs) self.local_storage = get_storage_class( "compressor.storage.CompressorFileStorage")() def save(self, name, content): name = super(CachedS3BotoStorage, self).save(name, content) self.local_storage._save(name, content) return name def dictfetchall(cursor): """ Returns all rows from a cursor as a dict """ desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ]
Remove the unused convert_png_to_jpg method.
Remove the unused convert_png_to_jpg method.
Python
mit
FireCARES/firecares,FireCARES/firecares,meilinger/firecares,meilinger/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,meilinger/firecares,HunterConnelly/firecares
python
## Code Before: from django.core.files.storage import get_storage_class from storages.backends.s3boto import S3BotoStorage from PIL import Image def convert_png_to_jpg(img): """ Converts a png to a jpg. :param img: Absolute path to the image. :returns: the filename """ im = Image.open(img) bg = Image.new("RGB", im.size, (255, 255, 255)) bg.paste(im, im) filename = img.replace('png', 'jpg') bg.save(filename, quality=85) return filename class CachedS3BotoStorage(S3BotoStorage): """ S3 storage backend that saves the files locally, too. """ def __init__(self, *args, **kwargs): super(CachedS3BotoStorage, self).__init__(*args, **kwargs) self.local_storage = get_storage_class( "compressor.storage.CompressorFileStorage")() def save(self, name, content): name = super(CachedS3BotoStorage, self).save(name, content) self.local_storage._save(name, content) return name def dictfetchall(cursor): """ Returns all rows from a cursor as a dict """ desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ] ## Instruction: Remove the unused convert_png_to_jpg method. ## Code After: from django.core.files.storage import get_storage_class from storages.backends.s3boto import S3BotoStorage from PIL import Image class CachedS3BotoStorage(S3BotoStorage): """ S3 storage backend that saves the files locally, too. """ def __init__(self, *args, **kwargs): super(CachedS3BotoStorage, self).__init__(*args, **kwargs) self.local_storage = get_storage_class( "compressor.storage.CompressorFileStorage")() def save(self, name, content): name = super(CachedS3BotoStorage, self).save(name, content) self.local_storage._save(name, content) return name def dictfetchall(cursor): """ Returns all rows from a cursor as a dict """ desc = cursor.description return [ dict(zip([col[0] for col in desc], row)) for row in cursor.fetchall() ]
# ... existing code ... from django.core.files.storage import get_storage_class from storages.backends.s3boto import S3BotoStorage from PIL import Image class CachedS3BotoStorage(S3BotoStorage): # ... rest of the code ...
7476a8d3a70bc0d69a4683afc45e0663640701dc
src/main/java/de/flux/playground/deckcompare/dto/Deck.java
src/main/java/de/flux/playground/deckcompare/dto/Deck.java
package de.flux.playground.deckcompare.dto; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import lombok.Data; @Data @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class Deck { @XmlAttribute(name = "game") private String id; @XmlElement(name = "section") private List<Section> sections; private String notes; }
package de.flux.playground.deckcompare.dto; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import lombok.Data; @Data @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class Deck { private static final int DEFAULT_SECTIONS = 2; private static final int CARDS_SECTION = 1; @XmlAttribute(name = "game") private String id; @XmlElement(name = "section") private List<Section> sections; private String notes; public List<Card> getCards() { List<Card> cards = new ArrayList<Card>(); if (sections.size() > DEFAULT_SECTIONS) { cards = sections.get(CARDS_SECTION).getCards(); } return cards; } }
Add method to extract playable cards from deck
Add method to extract playable cards from deck Without having to take the detour over the sections
Java
apache-2.0
JLengenfeld/deckcompare,JLengenfeld/deckcompare,JLengenfeld/deckcompare
java
## Code Before: package de.flux.playground.deckcompare.dto; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import lombok.Data; @Data @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class Deck { @XmlAttribute(name = "game") private String id; @XmlElement(name = "section") private List<Section> sections; private String notes; } ## Instruction: Add method to extract playable cards form deck Without having to take the detour over the sections ## Code After: package de.flux.playground.deckcompare.dto; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import lombok.Data; @Data @XmlRootElement @XmlAccessorType(XmlAccessType.FIELD) public class Deck { private static final int DEFAULT_SECTIONS = 2; private static final int CARDS_SECTION = 1; @XmlAttribute(name = "game") private String id; @XmlElement(name = "section") private List<Section> sections; private String notes; public List<Card> getCards() { List<Card> cards = new ArrayList<Card>(); if (sections.size() > DEFAULT_SECTIONS) { cards = sections.get(CARDS_SECTION).getCards(); } return cards; } }
// ... existing code ... package de.flux.playground.deckcompare.dto; import java.util.ArrayList; import java.util.List; import javax.xml.bind.annotation.XmlAccessType; // ... modified code ... @XmlAccessorType(XmlAccessType.FIELD) public class Deck { private static final int DEFAULT_SECTIONS = 2; private static final int CARDS_SECTION = 1; @XmlAttribute(name = "game") private String id; @XmlElement(name = "section") ... private List<Section> sections; private String notes; public List<Card> getCards() { List<Card> cards = new ArrayList<Card>(); if (sections.size() > DEFAULT_SECTIONS) { cards = sections.get(CARDS_SECTION).getCards(); } return cards; } } // ... rest of the code ...
f943aa57d6ee462146ff0ab2a091c406d009acce
polyaxon/scheduler/spawners/templates/services/default_env_vars.py
polyaxon/scheduler/spawners/templates/services/default_env_vars.py
from django.conf import settings from scheduler.spawners.templates.env_vars import get_from_app_secret def get_service_env_vars(): return [ get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'), get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'), get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password', settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME) ]
from django.conf import settings from libs.api import API_KEY_NAME, get_settings_api_url from scheduler.spawners.templates.env_vars import get_env_var, get_from_app_secret def get_service_env_vars(): return [ get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'), get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'), get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password', settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME), get_env_var(name=API_KEY_NAME, value=get_settings_api_url()), ]
Add api url to default env vars
Add api url to default env vars
Python
apache-2.0
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
python
## Code Before: from django.conf import settings from scheduler.spawners.templates.env_vars import get_from_app_secret def get_service_env_vars(): return [ get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'), get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'), get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password', settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME) ] ## Instruction: Add api url to default env vars ## Code After: from django.conf import settings from libs.api import API_KEY_NAME, get_settings_api_url from scheduler.spawners.templates.env_vars import get_env_var, get_from_app_secret def get_service_env_vars(): return [ get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'), get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'), get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password', settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME), get_env_var(name=API_KEY_NAME, value=get_settings_api_url()), ]
# ... existing code ... from django.conf import settings from libs.api import API_KEY_NAME, get_settings_api_url from scheduler.spawners.templates.env_vars import get_env_var, get_from_app_secret def get_service_env_vars(): # ... modified code ... get_from_app_secret('POLYAXON_SECRET_KEY', 'polyaxon-secret'), get_from_app_secret('POLYAXON_INTERNAL_SECRET_TOKEN', 'polyaxon-internal-secret-token'), get_from_app_secret('POLYAXON_RABBITMQ_PASSWORD', 'rabbitmq-password', settings.POLYAXON_K8S_RABBITMQ_SECRET_NAME), get_env_var(name=API_KEY_NAME, value=get_settings_api_url()), ] # ... rest of the code ...
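The added line injects a plain name/value environment variable built by get_env_var alongside the secret-backed ones. As a rough sketch only (the real polyaxon helper, the value of API_KEY_NAME, and the API URL are not shown in this record, so everything below is an assumption), a helper like this usually just wraps the value with the official Kubernetes Python client:

```python
# Hypothetical stand-in for scheduler.spawners.templates.env_vars.get_env_var;
# the actual polyaxon implementation may differ.
from kubernetes import client

def get_env_var(name, value):
    # Wrap a literal value in a V1EnvVar so it can be attached to a container spec.
    return client.V1EnvVar(name=name, value=str(value))

print(get_env_var("POLYAXON_API", "http://polyaxon-api:80").to_dict())
```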
1f3730ac4d531ca0d582a8b8bded871acb409847
backend/api-server/warehaus_api/events/models.py
backend/api-server/warehaus_api/events/models.py
from .. import db class Event(db.Model): timestamp = db.Field() obj_id = db.Field() # The object for which this event was created about user_id = db.Field() # The user who performed the action # A list of IDs which are interested in this event. For example, when creating # a server we obviously want this event to be shows in the server page, but we # also want it to be shown in the lab page. So we put two IDs in the list: the # server ID and the lab ID. # Another example is when we delete the server. Then we would be able to show # that event in the lab page although the server is already deleted. interested_ids = db.Field() title = db.Field() # Event title content = db.Field() # Event content def create_event(obj_id, user_id, interested_ids, title, content=''): event = Event( timestamp = db.times.now(), obj_id = obj_id, interested_ids = interested_ids, title = title, content = content, ) event.save()
from .. import db class Event(db.Model): timestamp = db.Field() obj_id = db.Field() # The object for which this event was created about user_id = db.Field() # The user who performed the action # A list of IDs which are interested in this event. For example, when creating # a server we obviously want this event to be shows in the server page, but we # also want it to be shown in the lab page. So we put two IDs in the list: the # server ID and the lab ID. # Another example is when we delete the server. Then we would be able to show # that event in the lab page although the server is already deleted. interested_ids = db.Field() title = db.Field() # Event title content = db.Field() # Event content def create_event(obj_id, user_id, interested_ids, title, content=''): event = Event( timestamp = db.times.now(), obj_id = obj_id, user_id = user_id, interested_ids = interested_ids, title = title, content = content, ) event.save()
Fix api-server events not saving the user ID
Fix api-server events not saving the user ID
Python
agpl-3.0
labsome/labsome,warehaus/warehaus,warehaus/warehaus,labsome/labsome,warehaus/warehaus,labsome/labsome
python
## Code Before: from .. import db class Event(db.Model): timestamp = db.Field() obj_id = db.Field() # The object for which this event was created about user_id = db.Field() # The user who performed the action # A list of IDs which are interested in this event. For example, when creating # a server we obviously want this event to be shows in the server page, but we # also want it to be shown in the lab page. So we put two IDs in the list: the # server ID and the lab ID. # Another example is when we delete the server. Then we would be able to show # that event in the lab page although the server is already deleted. interested_ids = db.Field() title = db.Field() # Event title content = db.Field() # Event content def create_event(obj_id, user_id, interested_ids, title, content=''): event = Event( timestamp = db.times.now(), obj_id = obj_id, interested_ids = interested_ids, title = title, content = content, ) event.save() ## Instruction: Fix api-server events not saving the user ID ## Code After: from .. import db class Event(db.Model): timestamp = db.Field() obj_id = db.Field() # The object for which this event was created about user_id = db.Field() # The user who performed the action # A list of IDs which are interested in this event. For example, when creating # a server we obviously want this event to be shows in the server page, but we # also want it to be shown in the lab page. So we put two IDs in the list: the # server ID and the lab ID. # Another example is when we delete the server. Then we would be able to show # that event in the lab page although the server is already deleted. interested_ids = db.Field() title = db.Field() # Event title content = db.Field() # Event content def create_event(obj_id, user_id, interested_ids, title, content=''): event = Event( timestamp = db.times.now(), obj_id = obj_id, user_id = user_id, interested_ids = interested_ids, title = title, content = content, ) event.save()
# ... existing code ... event = Event( timestamp = db.times.now(), obj_id = obj_id, user_id = user_id, interested_ids = interested_ids, title = title, content = content, # ... rest of the code ...
6108ec5c8790a2cf5244b1c8e707474cd142c05d
src/main/java/org/hildan/fx/components/ValidatingTextFieldListCell.java
src/main/java/org/hildan/fx/components/ValidatingTextFieldListCell.java
package org.hildan.fx.components; import java.util.function.Predicate; import javafx.scene.control.cell.TextFieldListCell; import javafx.util.StringConverter; import org.hildan.fxlog.themes.Css; public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> { private final Predicate<String> validator; public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) { super(converter); this.validator = validator; } public void commitEdit(T item) { if (!isEditing()) { return; } boolean itemIsValid = validator.test(getText()); pseudoClassStateChanged(Css.INVALID, !itemIsValid); if (itemIsValid) { // only commit if the item is valid, otherwise we stay in edit state super.commitEdit(item); } } }
package org.hildan.fx.components; import java.util.function.Predicate; import javafx.scene.control.*; import javafx.scene.control.cell.TextFieldListCell; import javafx.util.StringConverter; import org.hildan.fxlog.themes.Css; public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> { private final Predicate<String> validator; public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) { super(converter); this.validator = validator; } public void commitEdit(T item) { if (!isEditing()) { return; } // the edited text is not in getText() but in the TextField used as Graphic for this cell TextField textField = (TextField) getGraphic(); String editedText = textField.getText(); boolean itemIsValid = validator.test(editedText); pseudoClassStateChanged(Css.INVALID, !itemIsValid); if (itemIsValid) { // only commit if the item is valid, otherwise we stay in edit state super.commitEdit(item); } } }
Fix ValidatingTextFieldListCell to use the actual edited text
Fix ValidatingTextFieldListCell to use the actual edited text
Java
mit
joffrey-bion/fx-log
java
## Code Before: package org.hildan.fx.components; import java.util.function.Predicate; import javafx.scene.control.cell.TextFieldListCell; import javafx.util.StringConverter; import org.hildan.fxlog.themes.Css; public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> { private final Predicate<String> validator; public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) { super(converter); this.validator = validator; } public void commitEdit(T item) { if (!isEditing()) { return; } boolean itemIsValid = validator.test(getText()); pseudoClassStateChanged(Css.INVALID, !itemIsValid); if (itemIsValid) { // only commit if the item is valid, otherwise we stay in edit state super.commitEdit(item); } } } ## Instruction: Fix ValidatedTextFieldListCell to use the actual edited text ## Code After: package org.hildan.fx.components; import java.util.function.Predicate; import javafx.scene.control.*; import javafx.scene.control.cell.TextFieldListCell; import javafx.util.StringConverter; import org.hildan.fxlog.themes.Css; public class ValidatingTextFieldListCell<T> extends TextFieldListCell<T> { private final Predicate<String> validator; public ValidatingTextFieldListCell(StringConverter<T> converter, Predicate<String> validator) { super(converter); this.validator = validator; } public void commitEdit(T item) { if (!isEditing()) { return; } // the edited text is not in getText() but in the TextField used as Graphic for this cell TextField textField = (TextField) getGraphic(); String editedText = textField.getText(); boolean itemIsValid = validator.test(editedText); pseudoClassStateChanged(Css.INVALID, !itemIsValid); if (itemIsValid) { // only commit if the item is valid, otherwise we stay in edit state super.commitEdit(item); } } }
// ... existing code ... import java.util.function.Predicate; import javafx.scene.control.*; import javafx.scene.control.cell.TextFieldListCell; import javafx.util.StringConverter; // ... modified code ... if (!isEditing()) { return; } // the edited text is not in getText() but in the TextField used as Graphic for this cell TextField textField = (TextField) getGraphic(); String editedText = textField.getText(); boolean itemIsValid = validator.test(editedText); pseudoClassStateChanged(Css.INVALID, !itemIsValid); if (itemIsValid) { // only commit if the item is valid, otherwise we stay in edit state // ... rest of the code ...
234df393c438fdf729dc050d20084e1fe1a4c2ee
backend/mcapi/mcdir.py
backend/mcapi/mcdir.py
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
Change directory where data is written to.
Change directory where data is written to.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
python
## Code Before: import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path ## Instruction: Change directory where data is written to. ## Code After: import utils from os import environ import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): pieces = uidstr.split('-') path = os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4]) utils.mkdirp(path) return path
// ... existing code ... import os.path MCDIR = environ.get("MCDIR") or '/mcfs/data/materialscommons' def for_uid(uidstr): // ... rest of the code ...
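Aside from the changed base directory, for_uid shards files into two nested subdirectories taken from characters 0-2 and 2-4 of the UID's second dash-separated piece. A small runnable sketch of just the path logic (the UID is made up, and mkdirp is left out so nothing is created on disk):

```python
import os.path

MCDIR = '/mcfs/data/materialscommons'

def path_for_uid(uidstr):
    # Same slicing as for_uid above, minus the mkdirp side effect.
    pieces = uidstr.split('-')
    return os.path.join(MCDIR, pieces[1][0:2], pieces[1][2:4])

print(path_for_uid('proj-4f6a2b7c'))  # /mcfs/data/materialscommons/4f/6a
```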
6f464e422befe22e56bb759a7ac7ff52a353c6d9
accountant/functional_tests/test_layout_and_styling.py
accountant/functional_tests/test_layout_and_styling.py
import unittest from .base import FunctionalTestCase from .pages import game class StylesheetTests(FunctionalTestCase): def test_color_css_loaded(self): self.story('Create a game') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) page.start_button.click() self.assertTrue(any('css/color.css' in s.get_attribute('href') for s in page.stylesheets)) def test_main_stylesheet_loaded(self): self.story('Load the start page') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) self.assertTrue(any('css/main.css' in s.get_attribute('href') for s in page.stylesheets))
import unittest from .base import FunctionalTestCase from .pages import game class StylesheetTests(FunctionalTestCase): def test_color_css_loaded(self): self.story('Create a game') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) page.start_button.click() self.assertTrue(any('css/color.css' in s.get_attribute('href') for s in page.stylesheets)) def test_main_stylesheet_loaded(self): self.story('Load the start page') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) self.assertTrue(any('css/main.css' in s.get_attribute('href') for s in page.stylesheets)) # Test constant to see if css actually gets loaded self.assertEqual('rgb(55, 71, 79)', page.bank_cash.value_of_css_property('border-color'))
Test if loaded CSS is applied
Test if loaded CSS is applied
Python
mit
XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant
python
## Code Before: import unittest from .base import FunctionalTestCase from .pages import game class StylesheetTests(FunctionalTestCase): def test_color_css_loaded(self): self.story('Create a game') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) page.start_button.click() self.assertTrue(any('css/color.css' in s.get_attribute('href') for s in page.stylesheets)) def test_main_stylesheet_loaded(self): self.story('Load the start page') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) self.assertTrue(any('css/main.css' in s.get_attribute('href') for s in page.stylesheets)) ## Instruction: Test is loaded CSS is applied ## Code After: import unittest from .base import FunctionalTestCase from .pages import game class StylesheetTests(FunctionalTestCase): def test_color_css_loaded(self): self.story('Create a game') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) page.start_button.click() self.assertTrue(any('css/color.css' in s.get_attribute('href') for s in page.stylesheets)) def test_main_stylesheet_loaded(self): self.story('Load the start page') self.browser.get(self.live_server_url) page = game.Homepage(self.browser) self.assertTrue(any('css/main.css' in s.get_attribute('href') for s in page.stylesheets)) # Test constant to see if css actually gets loaded self.assertEqual('rgb(55, 71, 79)', page.bank_cash.value_of_css_property('border-color'))
// ... existing code ... self.assertTrue(any('css/main.css' in s.get_attribute('href') for s in page.stylesheets)) # Test constant to see if css actually gets loaded self.assertEqual('rgb(55, 71, 79)', page.bank_cash.value_of_css_property('border-color')) // ... rest of the code ...
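The new assertion hard-codes 'rgb(55, 71, 79)', which is simply the decimal form of the hex color #37474F. A tiny helper for deriving such a string; note that some drivers report computed colors as rgba(...) instead, so treat this as a sketch rather than a universal rule:

```python
def hex_to_rgb(hex_color):
    # Expand '#rrggbb' into the 'rgb(r, g, b)' form used in the assertion above.
    h = hex_color.lstrip('#')
    return 'rgb({}, {}, {})'.format(*(int(h[i:i + 2], 16) for i in (0, 2, 4)))

print(hex_to_rgb('#37474f'))  # rgb(55, 71, 79)
```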
a077a5b7731e7d609b5c3adc8f8176ad79053f17
rmake/lib/twisted_extras/tools.py
rmake/lib/twisted_extras/tools.py
from twisted.internet import defer class Serializer(object): def __init__(self): self._lock = defer.DeferredLock() self._waiting = {} def call(self, func, args=(), kwargs=None, collapsible=False): d = self._lock.acquire() self._waiting[d] = collapsible if not kwargs: kwargs = {} @d.addCallback def _locked(_): if collapsible and len(self._waiting) > 1: # Superseded return return func(*args, **kwargs) @d.addBoth def _unlock(result): self._lock.release() del self._waiting[d] return result return d
from twisted.internet import defer class Serializer(object): def __init__(self): self._lock = defer.DeferredLock() self._waiting = {} def call(self, func, args=(), kwargs=None, collapsible=False): d = self._lock.acquire() self._waiting[d] = collapsible if not kwargs: kwargs = {} @d.addCallback def _locked(_): if collapsible and len(self._waiting) > 1: # Superseded return return func(*args, **kwargs) @d.addBoth def _unlock(result): del self._waiting[d] self._lock.release() return result return d
Fix Serializer locking bug that caused it to skip calls it should have made
Fix Serializer locking bug that caused it to skip calls it should have made
Python
apache-2.0
sassoftware/rmake3,sassoftware/rmake3,sassoftware/rmake3
python
## Code Before: from twisted.internet import defer class Serializer(object): def __init__(self): self._lock = defer.DeferredLock() self._waiting = {} def call(self, func, args=(), kwargs=None, collapsible=False): d = self._lock.acquire() self._waiting[d] = collapsible if not kwargs: kwargs = {} @d.addCallback def _locked(_): if collapsible and len(self._waiting) > 1: # Superseded return return func(*args, **kwargs) @d.addBoth def _unlock(result): self._lock.release() del self._waiting[d] return result return d ## Instruction: Fix Serializer locking bug that caused it to skip calls it should have made ## Code After: from twisted.internet import defer class Serializer(object): def __init__(self): self._lock = defer.DeferredLock() self._waiting = {} def call(self, func, args=(), kwargs=None, collapsible=False): d = self._lock.acquire() self._waiting[d] = collapsible if not kwargs: kwargs = {} @d.addCallback def _locked(_): if collapsible and len(self._waiting) > 1: # Superseded return return func(*args, **kwargs) @d.addBoth def _unlock(result): del self._waiting[d] self._lock.release() return result return d
// ... existing code ... return func(*args, **kwargs) @d.addBoth def _unlock(result): del self._waiting[d] self._lock.release() return result return d // ... rest of the code ...
d45391429f01d5d4ea22e28bef39a2bb419df04f
djangae/apps.py
djangae/apps.py
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ImproperlyConfigured class DjangaeConfig(AppConfig): name = 'djangae' verbose_name = _("Djangae") def ready(self): from djangae.db.backends.appengine.caching import reset_context from django.core.signals import request_finished, request_started request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset") request_started.connect(reset_context, dispatch_uid="request_started_context_reset") from django.conf import settings if 'django.contrib.contenttypes' in settings.INSTALLED_APPS and ( not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS): raise ImproperlyConfigured( "If you're using django.contrib.contenttypes, then you need " "to add djangae.contrib.contenttypes to INSTALLED_APPS after " "django.contrib.contenttypes." )
from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ImproperlyConfigured class DjangaeConfig(AppConfig): name = 'djangae' verbose_name = _("Djangae") def ready(self): from djangae.db.backends.appengine.caching import reset_context from django.core.signals import request_finished, request_started request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset") request_started.connect(reset_context, dispatch_uid="request_started_context_reset") from django.conf import settings contenttype_configuration_error = ImproperlyConfigured( "If you're using django.contrib.contenttypes, then you need " "to add djangae.contrib.contenttypes to INSTALLED_APPS after " "django.contrib.contenttypes." ) if 'django.contrib.contenttypes' in settings.INSTALLED_APPS: if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS: # Raise error if User is using Django CT, but not Djangae raise contenttype_configuration_error else: if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \ settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'): # Raise error if User is using both Django and Djangae CT, but # Django CT comes after Djangae CT raise contenttype_configuration_error
Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes
Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes
Python
bsd-3-clause
potatolondon/djangae,grzes/djangae,kirberich/djangae,kirberich/djangae,kirberich/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae
python
## Code Before: from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ImproperlyConfigured class DjangaeConfig(AppConfig): name = 'djangae' verbose_name = _("Djangae") def ready(self): from djangae.db.backends.appengine.caching import reset_context from django.core.signals import request_finished, request_started request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset") request_started.connect(reset_context, dispatch_uid="request_started_context_reset") from django.conf import settings if 'django.contrib.contenttypes' in settings.INSTALLED_APPS and ( not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS): raise ImproperlyConfigured( "If you're using django.contrib.contenttypes, then you need " "to add djangae.contrib.contenttypes to INSTALLED_APPS after " "django.contrib.contenttypes." ) ## Instruction: Raise configuration error if django.contrib.contenttypes comes after djangae.contrib.contenttypes ## Code After: from django.apps import AppConfig from django.utils.translation import ugettext_lazy as _ from django.core.exceptions import ImproperlyConfigured class DjangaeConfig(AppConfig): name = 'djangae' verbose_name = _("Djangae") def ready(self): from djangae.db.backends.appengine.caching import reset_context from django.core.signals import request_finished, request_started request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset") request_started.connect(reset_context, dispatch_uid="request_started_context_reset") from django.conf import settings contenttype_configuration_error = ImproperlyConfigured( "If you're using django.contrib.contenttypes, then you need " "to add djangae.contrib.contenttypes to INSTALLED_APPS after " "django.contrib.contenttypes." ) if 'django.contrib.contenttypes' in settings.INSTALLED_APPS: if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS: # Raise error if User is using Django CT, but not Djangae raise contenttype_configuration_error else: if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \ settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'): # Raise error if User is using both Django and Djangae CT, but # Django CT comes after Djangae CT raise contenttype_configuration_error
# ... existing code ... request_started.connect(reset_context, dispatch_uid="request_started_context_reset") from django.conf import settings contenttype_configuration_error = ImproperlyConfigured( "If you're using django.contrib.contenttypes, then you need " "to add djangae.contrib.contenttypes to INSTALLED_APPS after " "django.contrib.contenttypes." ) if 'django.contrib.contenttypes' in settings.INSTALLED_APPS: if not 'djangae.contrib.contenttypes' in settings.INSTALLED_APPS: # Raise error if User is using Django CT, but not Djangae raise contenttype_configuration_error else: if settings.INSTALLED_APPS.index('django.contrib.contenttypes') > \ settings.INSTALLED_APPS.index('djangae.contrib.contenttypes'): # Raise error if User is using both Django and Djangae CT, but # Django CT comes after Djangae CT raise contenttype_configuration_error # ... rest of the code ...
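The whole check reduces to list ordering in INSTALLED_APPS: django.contrib.contenttypes may only be used together with djangae.contrib.contenttypes, and the Django app has to be listed first. A stand-alone sketch of that ordering rule (app labels only; the full check in ready() also raises when the djangae app is missing entirely):

```python
INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'djangae.contrib.contenttypes',  # must come after the Django contenttypes app
    'djangae',
]

django_ct = INSTALLED_APPS.index('django.contrib.contenttypes')
djangae_ct = INSTALLED_APPS.index('djangae.contrib.contenttypes')
assert django_ct < djangae_ct, "djangae.contrib.contenttypes must follow django.contrib.contenttypes"
```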
99496d97f3e00284840d2127556bba0e21d1a99e
frappe/tests/test_commands.py
frappe/tests/test_commands.py
from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode)
from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) class TestCommands(BaseTestCommands, unittest.TestCase): def test_execute(self): # execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEquals(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEquals(self.returncode, 1) self.assertIsNotNone(self.stderr) # execute a command with kwargs self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEquals(self.returncode, 0) self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
Add tests for bench execute
test: Add tests for bench execute
Python
mit
saurabh6790/frappe,StrellaGroup/frappe,adityahase/frappe,mhbu50/frappe,adityahase/frappe,yashodhank/frappe,mhbu50/frappe,yashodhank/frappe,mhbu50/frappe,mhbu50/frappe,StrellaGroup/frappe,saurabh6790/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,adityahase/frappe,frappe/frappe,saurabh6790/frappe,almeidapaulopt/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,frappe/frappe
python
## Code Before: from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) ## Instruction: test: Add tests for bench execute ## Code After: from __future__ import unicode_literals import shlex import subprocess import unittest import frappe def clean(value): if isinstance(value, (bytes, str)): value = value.decode().strip() return value class BaseTestCommands: def execute(self, command): command = command.format(**{"site": frappe.local.site}) command = shlex.split(command) self._proc = subprocess.run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) class TestCommands(BaseTestCommands, unittest.TestCase): def test_execute(self): # execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEquals(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEquals(self.returncode, 1) self.assertIsNotNone(self.stderr) # execute a command with kwargs self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEquals(self.returncode, 0) self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType'))
# ... existing code ... self.stdout = clean(self._proc.stdout) self.stderr = clean(self._proc.stderr) self.returncode = clean(self._proc.returncode) class TestCommands(BaseTestCommands, unittest.TestCase): def test_execute(self): # execute a command expecting a numeric output self.execute("bench --site {site} execute frappe.db.get_database_size") self.assertEquals(self.returncode, 0) self.assertIsInstance(float(self.stdout), float) # execute a command expecting an errored output as local won't exist self.execute("bench --site {site} execute frappe.local.site") self.assertEquals(self.returncode, 1) self.assertIsNotNone(self.stderr) # execute a command with kwargs self.execute("""bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'""") self.assertEquals(self.returncode, 0) self.assertEquals(self.stdout[1:-1], frappe.bold(text='DocType')) # ... rest of the code ...
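One detail worth calling out in the test above: the --kwargs JSON is written with doubled braces because execute() runs str.format() on the command string, which would otherwise treat the braces as placeholders. A quick illustration with an invented site name:

```python
template = """bench --site {site} execute frappe.bold --kwargs '{{"text": "DocType"}}'"""
print(template.format(site="mysite.localhost"))
# bench --site mysite.localhost execute frappe.bold --kwargs '{"text": "DocType"}'
```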
3db3ed080c6a372188f3a7366b7bb001c3829b4f
src/main/java/persistence/util/TransactionUtil.java
src/main/java/persistence/util/TransactionUtil.java
package persistence.util; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; public class TransactionUtil { public static void doTransaction(EntityManagerFactory emf, Transaction t) { EntityManager em = emf.createEntityManager(); EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ doTransaction(em, t); }finally { em.close(); } } public static void doTransaction(EntityManager em, Transaction t) { EntityTransaction tx = null; try{ tx = em.getTransaction(); tx.begin(); t.doTransation(em); tx.commit(); } finally { if (tx != null && tx.isActive()) { tx.rollback(); } } } }
package persistence.util; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; public class TransactionUtil { public static void doTransaction(EntityManagerFactory emf, Transaction t) { EntityManager em = emf.createEntityManager(); EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ doTransaction(em, t); }finally { em.close(); } } public static void doTransaction(EntityManager em, Transaction t) { EntityTransaction tx = null; EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ tx = em.getTransaction(); tx.begin(); t.doTransation(em); tx.commit(); } finally { if (tx != null && tx.isActive()) { tx.rollback(); } } } }
Set the thread-local entity manager in the validator factory also in doTransaction with already created EntityManagers
Set the thread-local entity manager in the validator factory also in doTransaction with already created EntityManagers
Java
apache-2.0
lipido/si1415_persistence
java
## Code Before: package persistence.util; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; public class TransactionUtil { public static void doTransaction(EntityManagerFactory emf, Transaction t) { EntityManager em = emf.createEntityManager(); EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ doTransaction(em, t); }finally { em.close(); } } public static void doTransaction(EntityManager em, Transaction t) { EntityTransaction tx = null; try{ tx = em.getTransaction(); tx.begin(); t.doTransation(em); tx.commit(); } finally { if (tx != null && tx.isActive()) { tx.rollback(); } } } } ## Instruction: Set thread local the entity manager in validatorfactory also in doTransaction with already created EntityManagers ## Code After: package persistence.util; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.EntityTransaction; public class TransactionUtil { public static void doTransaction(EntityManagerFactory emf, Transaction t) { EntityManager em = emf.createEntityManager(); EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ doTransaction(em, t); }finally { em.close(); } } public static void doTransaction(EntityManager em, Transaction t) { EntityTransaction tx = null; EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ tx = em.getTransaction(); tx.begin(); t.doTransation(em); tx.commit(); } finally { if (tx != null && tx.isActive()) { tx.rollback(); } } } }
// ... existing code ... public static void doTransaction(EntityManager em, Transaction t) { EntityTransaction tx = null; EMInjectorConstraintValidatorFactory.setThreadLocalEntityManager(em); try{ tx = em.getTransaction(); tx.begin(); // ... rest of the code ...
1768207c57b66812931d2586c5544c9b74446918
peering/management/commands/update_peering_session_states.py
peering/management/commands/update_peering_session_states.py
import logging from django.core.management.base import BaseCommand from peering.models import InternetExchange class Command(BaseCommand): help = "Update peering session states for Internet Exchanges." logger = logging.getLogger("peering.manager.peering") def handle(self, *args, **options): self.logger.info("Updating peering session states...") internet_exchanges = InternetExchange.objects.all() for internet_exchange in internet_exchanges: internet_exchange.update_peering_session_states()
import logging from django.core.management.base import BaseCommand from peering.models import InternetExchange class Command(BaseCommand): help = "Update peering session states for Internet Exchanges." logger = logging.getLogger("peering.manager.peering") def handle(self, *args, **options): self.logger.info("Updating peering session states...") internet_exchanges = InternetExchange.objects.all() for internet_exchange in internet_exchanges: internet_exchange.poll_peering_sessions()
Fix command polling sessions for IX.
Fix command polling sessions for IX.
Python
apache-2.0
respawner/peering-manager,respawner/peering-manager,respawner/peering-manager,respawner/peering-manager
python
## Code Before: import logging from django.core.management.base import BaseCommand from peering.models import InternetExchange class Command(BaseCommand): help = "Update peering session states for Internet Exchanges." logger = logging.getLogger("peering.manager.peering") def handle(self, *args, **options): self.logger.info("Updating peering session states...") internet_exchanges = InternetExchange.objects.all() for internet_exchange in internet_exchanges: internet_exchange.update_peering_session_states() ## Instruction: Fix command polling sessions for IX. ## Code After: import logging from django.core.management.base import BaseCommand from peering.models import InternetExchange class Command(BaseCommand): help = "Update peering session states for Internet Exchanges." logger = logging.getLogger("peering.manager.peering") def handle(self, *args, **options): self.logger.info("Updating peering session states...") internet_exchanges = InternetExchange.objects.all() for internet_exchange in internet_exchanges: internet_exchange.poll_peering_sessions()
# ... existing code ... internet_exchanges = InternetExchange.objects.all() for internet_exchange in internet_exchanges: internet_exchange.poll_peering_sessions() # ... rest of the code ...
46231cf5938f090521f7d65c881f0df8b6e34511
app/src/main/java/mozilla/org/webmaker/activity/Tinker.java
app/src/main/java/mozilla/org/webmaker/activity/Tinker.java
package mozilla.org.webmaker.activity; import android.app.ActionBar; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Menu; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import mozilla.org.webmaker.R; import mozilla.org.webmaker.WebmakerActivity; import mozilla.org.webmaker.view.WebmakerWebView; public class Tinker extends WebmakerActivity { public Tinker() { super("tinker", R.id.tinker_layout, R.layout.tinker_layout, R.menu.menu_tinker); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Custom styles ActionBar actionBar = getActionBar(); ColorDrawable colorOne = new ColorDrawable(Color.parseColor("#ff303250")); ColorDrawable colorTwo = new ColorDrawable(Color.parseColor("#ff303250")); actionBar.setStackedBackgroundDrawable(colorOne); actionBar.setBackgroundDrawable(colorTwo); Window window = getWindow(); window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); window.setStatusBarColor(0xff282733); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return true; } }
package mozilla.org.webmaker.activity; import android.app.ActionBar; import android.content.res.Resources; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Menu; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import mozilla.org.webmaker.R; import mozilla.org.webmaker.WebmakerActivity; import mozilla.org.webmaker.view.WebmakerWebView; public class Tinker extends WebmakerActivity { public Tinker() { super("tinker", R.id.tinker_layout, R.layout.tinker_layout, R.menu.menu_tinker); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Custom styles Resources res = getResources(); int shadowPlum = res.getColor(R.color.shadow_plum); int plum = res.getColor(R.color.plum); ActionBar actionBar = getActionBar(); actionBar.setStackedBackgroundDrawable(plum); actionBar.setBackgroundDrawable(plum); Window window = getWindow(); window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); window.setStatusBarColor(shadowPlum); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return true; } }
Use plum and shadow_plum from colors.xml
Use plum and shadow_plum from colors.xml
Java
mpl-2.0
adamlofting/webmaker-android,k88hudson/webmaker-android,alicoding/webmaker-android,alanmoo/webmaker-android,alicoding/webmaker-android,bolaram/webmaker-android,alanmoo/webmaker-android,j796160836/webmaker-android,gvn/webmaker-android,rodmoreno/webmaker-android,j796160836/webmaker-android,rodmoreno/webmaker-android,mozilla/webmaker-android,gvn/webmaker-android,bolaram/webmaker-android,k88hudson/webmaker-android,mozilla/webmaker-android
java
## Code Before: package mozilla.org.webmaker.activity; import android.app.ActionBar; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Menu; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import mozilla.org.webmaker.R; import mozilla.org.webmaker.WebmakerActivity; import mozilla.org.webmaker.view.WebmakerWebView; public class Tinker extends WebmakerActivity { public Tinker() { super("tinker", R.id.tinker_layout, R.layout.tinker_layout, R.menu.menu_tinker); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Custom styles ActionBar actionBar = getActionBar(); ColorDrawable colorOne = new ColorDrawable(Color.parseColor("#ff303250")); ColorDrawable colorTwo = new ColorDrawable(Color.parseColor("#ff303250")); actionBar.setStackedBackgroundDrawable(colorOne); actionBar.setBackgroundDrawable(colorTwo); Window window = getWindow(); window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); window.setStatusBarColor(0xff282733); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return true; } } ## Instruction: Use plum and shadow_plum from colors.xml ## Code After: package mozilla.org.webmaker.activity; import android.app.ActionBar; import android.content.res.Resources; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Menu; import android.view.Window; import android.view.WindowManager; import android.widget.RelativeLayout; import mozilla.org.webmaker.R; import mozilla.org.webmaker.WebmakerActivity; import mozilla.org.webmaker.view.WebmakerWebView; public class Tinker extends WebmakerActivity { public Tinker() { super("tinker", R.id.tinker_layout, R.layout.tinker_layout, R.menu.menu_tinker); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Custom styles Resources res = getResources(); int shadowPlum = res.getColor(R.color.shadow_plum); int plum = res.getColor(R.color.plum); ActionBar actionBar = getActionBar(); actionBar.setStackedBackgroundDrawable(plum); actionBar.setBackgroundDrawable(plum); Window window = getWindow(); window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); window.setStatusBarColor(shadowPlum); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_main, menu); return true; } }
// ... existing code ... package mozilla.org.webmaker.activity; import android.app.ActionBar; import android.content.res.Resources; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; // ... modified code ... super.onCreate(savedInstanceState); // Custom styles Resources res = getResources(); int shadowPlum = res.getColor(R.color.shadow_plum); int plum = res.getColor(R.color.plum); ActionBar actionBar = getActionBar(); actionBar.setStackedBackgroundDrawable(plum); actionBar.setBackgroundDrawable(plum); Window window = getWindow(); window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS); window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS); window.setStatusBarColor(shadowPlum); } @Override // ... rest of the code ...
09c3c511687de8888180577fa66f4ca51f4bc237
taggit_autosuggest_select2/views.py
taggit_autosuggest_select2/views.py
from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
Remove deprecated django json shim
Remove deprecated django json shim
Python
mit
iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu-int/django-taggit-autosuggest-select2,iris-edu/django-taggit-autosuggest-select2,adam-iris/django-taggit-autosuggest-select2
python
## Code Before: from django.conf import settings from django.http import HttpResponse from django.utils import simplejson as json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json') ## Instruction: Remove deprecated django json shim ## Code After: from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag MAX_SUGGESTIONS = getattr(settings, 'TAGGIT_AUTOSUGGEST_MAX_SUGGESTIONS', 20) def list_tags(request): """ Returns a list of JSON objects with a `name` and a `value` property that all start like your query string `q` (not case sensitive). """ query = request.GET.get('q', '') limit = request.GET.get('limit', MAX_SUGGESTIONS) try: request.GET.get('limit', MAX_SUGGESTIONS) limit = min(int(limit), MAX_SUGGESTIONS) # max or less except ValueError: limit = MAX_SUGGESTIONS tag_name_qs = Tag.objects.filter(name__istartswith=query).\ values_list('name', flat=True) data = [{'name': n, 'value': n} for n in tag_name_qs[:limit]] return HttpResponse(json.dumps(data), mimetype='application/json') def list_all_tags(request): """Returns all the tags in the database""" all_tags = Tag.objects.all().values_list('name', flat=True) return HttpResponse(json.dumps(list(all_tags)), mimetype='application/json')
... from django.conf import settings from django.http import HttpResponse import json from taggit.models import Tag ...
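For reference, the payload list_tags builds is just a list of name/value pairs serialized with the standard-library json module that now replaces the removed django.utils simplejson shim; a minimal sketch with invented tag names:

```python
import json

# Same comprehension as in list_tags, fed with invented tag names.
tag_names = ['django', 'databases']
data = [{'name': n, 'value': n} for n in tag_names]
print(json.dumps(data))
# [{"name": "django", "value": "django"}, {"name": "databases", "value": "databases"}]
```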