[py] added possibility to suppress report (fixed bug)

-            with open(confound_report_dir + save_name + '_confoundcorr.html', 'w') as file:
-                file.write(report)
-
-            file.close()
+        with open(confound_report_dir + save_name + '_confoundcorr.html', 'w') as file:
+            file.write(report)
 
 
     def make_functional_connectivity(self,njobs=None,returngroup=False,file_hdr=None,file_idx=None):

[py] Do not fail when no suggestion is available

diff --git a/udata/search/__init__.py b/udata/search/__init__.py
index .. 100644
--- a/udata/search/__init__.py
+++ b/udata/search/__init__.py
@@ -185,7 +185,7 @@ def suggest(q, field, size=10):
     result = s.execute_suggest()
     try:
         return result.suggestions[0]['options']
-    except IndexError:
+    except (IndexError, AttributeError):
         return []

[py] add watt unit (#)

diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py
index .. 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py
@@ -174,6 +174,11 @@ VALID_UNIT_NAMES = {
     'exacore',
     'build',
     'prediction',
+    'watt',
+    'kilowatt',
+    'megawatt',
+    'gigawatt',
+    'terawatt',
     'heap',
     'volume',
 }

[py] When a subordinate step fails, raise the exception of that step rather than a generic 'Subordinate steps failed for this step.' message.

diff --git a/lettuce/core.py b/lettuce/core.py
index .. 100644
--- a/lettuce/core.py
+++ b/lettuce/core.py
@@ -298,7 +298,8 @@ class Step(object):
             def elsewhere(step):
                 # actual step behavior, maybe.
 
-        This will raise error (thus halting execution of the step) if a subordinate step fails.
+        This will raise the error of the first failing step (thus halting
+        execution of the step) if a subordinate step fails.
 
         """
         lines = string.split('\n')
@@ -312,7 +313,7 @@ class Step(object):
             else:
                 self.passed = False
                 self.failed = True
-        assert not steps_failed, "Subordinate steps failed for this step."
+        assert not steps_failed, steps_failed[0].why.exception
 
     def run(self, ignore_case):
         """Runs a step, trying to resolve it on available step

[py] fix hideStepIf unit tests: actually test that an exception raised in hideStepIf will be caught in the error logs

diff --git a/master/buildbot/test/unit/test_process_buildstep.py b/master/buildbot/test/unit/test_process_buildstep.py
index .. 100644
--- a/master/buildbot/test/unit/test_process_buildstep.py
+++ b/master/buildbot/test/unit/test_process_buildstep.py
@@ -219,11 +219,15 @@ class TestBuildStep(steps.BuildStepMixin, config.ConfigErrorsMixin, unittest.Tes
         d.addCallback(lambda _: self.assertTrue(called[0]))
         return d
 
+    @defer.inlineCallbacks
     def test_hideStepIf_fails(self):
         # 0/0 causes DivideByZeroError, which should be flagged as an exception
+
         self._setupWaterfallTest(
-            lambda: 0 / 0, False, expectedResult=EXCEPTION)
-        return self.runStep()
+            lambda x, y: 0 / 0, False, expectedResult=EXCEPTION)
+        self.step.addLogWithFailure = mock.Mock()
+        yield self.runStep()
+        self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
 
     @compat.usesFlushLoggedErrors
     def test_hideStepIf_Callable_Exception(self):
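The udata/search change above just widens one except clause so that a response without a `suggestions` attribute is treated like an empty one. A minimal self-contained sketch of the pattern (`_NoSuggest` is a hypothetical stub, not udata's real response type):

    class _NoSuggest:  # stands in for a search response with no suggestions attribute
        pass

    def first_options(result):
        try:
            return result.suggestions[0]['options']
        except (IndexError, AttributeError):
            return []

    assert first_options(_NoSuggest()) == []  # missing attribute, no crash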
[py] Should now test all (although upon error it stops trying to generate any more)

diff --git a/GPy/testing/examples_tests.py b/GPy/testing/examples_tests.py
index .. 100644
--- a/GPy/testing/examples_tests.py
+++ b/GPy/testing/examples_tests.py
@@ -29,11 +29,11 @@ def checkgrads_generator(model):
     """
 
 def model_checkgrads(model):
-    assert model.checkgrad() is True
+    assert model.checkgrad()
 
 
 def model_instance(model):
-    assert model.checkgrad() is True
+    assert isinstance(model, GPy.core.model)
 
 
 def test_models():
@@ -45,7 +45,7 @@ def test_models():
         print "MODULE", module_examples
         print "Before"
         print inspect.getmembers(module_examples, predicate=inspect.isfunction)
-        functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ]
+        functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ][::-1]
         print "After"
         print functions
         for example in functions:
@@ -72,3 +72,4 @@ def test_models():
 
 if __name__ == "__main__":
     print "Running unit tests, please be (very) patient..."
+    unittest.main()

[py] Update for Search change - unstable features now beta

diff --git a/mdf_toolbox/globus_search/search_helper.py b/mdf_toolbox/globus_search/search_helper.py
index .. 100644
--- a/mdf_toolbox/globus_search/search_helper.py
+++ b/mdf_toolbox/globus_search/search_helper.py
@@ -424,7 +424,7 @@ class SearchHelper():
             dict: The full mapping for the index.
         """
         return (self.__search_client.get(
-            "/unstable/index/{}/mapping".format(mdf_toolbox.translate_index(self.index)))
+            "/beta/index/{}/mapping".format(mdf_toolbox.translate_index(self.index)))
             ["mappings"])
 
     # ************************************************************************************

[py] trying to fix fourier tempogram inversion test

diff --git a/tests/test_features.py b/tests/test_features.py
index .. 100644
--- a/tests/test_features.py
+++ b/tests/test_features.py
@@ -814,7 +814,7 @@ def test_fourier_tempogram_invert(sr, hop_length, win_length, center, window):
 
     odf_inv = librosa.istft(tempogram, hop_length=1, center=center, window=window,
                             length=len(odf))
-    assert np.allclose(odf[sl], odf_inv[sl])
+    assert np.allclose(odf_inv[sl], odf[sl])
 
 
 def test_cens():
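A note on the librosa test fix above: swapping the arguments matters because np.allclose is not symmetric; the relative tolerance is scaled by the magnitude of the second argument. A small illustration with values chosen to expose the asymmetry (atol pinned to 0):

    import numpy as np

    # condition checked: |a - b| <= atol + rtol * |b|
    print(np.allclose(9.0, 10.0, rtol=0.1, atol=0))   # True:  1.0 <= 0.1 * 10.0
    print(np.allclose(10.0, 9.0, rtol=0.1, atol=0))   # False: 1.0 >  0.1 * 9.0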
[py] Add a test for RC2 with a bit key

diff --git a/tests/test_symmetric.py b/tests/test_symmetric.py
index .. 100644
--- a/tests/test_symmetric.py
+++ b/tests/test_symmetric.py
@@ -70,6 +70,17 @@ class SymmetricTests(unittest.TestCase):
         plaintext = symmetric.rc2_cbc_pkcs5_decrypt(key, ciphertext, iv)
         self.assertEqual(data, plaintext)
 
+    def test_rc2_40_encrypt_decrypt(self):
+        key = util.rand_bytes(5)
+        data = b'This is data to encrypt'
+
+        iv, ciphertext = symmetric.rc2_cbc_pkcs5_encrypt(key, data, None)
+        self.assertNotEqual(data, ciphertext)
+        self.assertEqual(byte_cls, type(ciphertext))
+
+        plaintext = symmetric.rc2_cbc_pkcs5_decrypt(key, ciphertext, iv)
+        self.assertEqual(data, plaintext)
+
     def test_des_encrypt_decrypt(self):
         key = util.rand_bytes(8)
         data = b'This is data to encrypt'

[py] Don't pass the client to progress callbacks anymore

diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py
index .. 100644
--- a/pyrogram/client/client.py
+++ b/pyrogram/client/client.py
@@ -1677,7 +1677,7 @@ class Client(Methods, BaseClient):
                     file_part += 1
 
                     if progress:
-                        progress(self, min(file_part * part_size, file_size), file_size, *progress_args)
+                        progress(min(file_part * part_size, file_size), file_size, *progress_args)
         except Client.StopTransmission:
             raise
         except Exception as e:
@@ -1808,7 +1808,6 @@ class Client(Methods, BaseClient):
 
                     if progress:
                         progress(
-                            self,
                             min(offset, file_size)
                             if file_size != 0
                             else offset,
@@ -1891,7 +1890,6 @@ class Client(Methods, BaseClient):
 
                         if progress:
                             progress(
-                                self,
                                 min(offset, file_size)
                                 if file_size != 0
                                 else offset,

[py] Simplify test_DerivativesDataSink_data_dtype_source

diff --git a/niworkflows/interfaces/tests/test_bids.py b/niworkflows/interfaces/tests/test_bids.py
index .. 100644
--- a/niworkflows/interfaces/tests/test_bids.py
+++ b/niworkflows/interfaces/tests/test_bids.py
@@ -519,7 +519,7 @@ def test_DerivativesDataSink_data_dtype_source(
 def make_empty_nii_with_dtype(fname, dtype):
     Path(fname).parent.mkdir(exist_ok=True, parents=True)
 
-    size = (30, 30, 30, 10)
+    size = (2, 3, 4, 5)
 
     nb.Nifti1Image(np.zeros(size, dtype=dtype), np.eye(4)).to_filename(fname)

[py] fix F local variable 'data' is assigned to but never used

diff --git a/masonite/drivers/SessionCookieDriver.py b/masonite/drivers/SessionCookieDriver.py
index .. 100644
--- a/masonite/drivers/SessionCookieDriver.py
+++ b/masonite/drivers/SessionCookieDriver.py
@@ -85,7 +85,7 @@ class SessionCookieDriver(SessionContract, BaseDriver):
             bool -- If the key was deleted or not
         """
 
-        data = self.__collect_data()
+        self.__collect_data()
 
         if self.request.get_cookie('s_{}'.format(key)):
             self.request.delete_cookie('s_{}'.format(key))
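After the pyrogram change above, progress callbacks receive only the transfer counters, not the client. A minimal sketch of a compatible callback (names and the sample invocation are illustrative, not the library's API):

    def progress(current, total, *args):
        # invoked with bytes transferred so far and the total size
        print("{:.1f}%".format(current * 100 / total))

    progress(512, 1024)  # hypothetical invocation -> 50.0%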
[py] Shorten lists in generated values in strategy.

diff --git a/tests/test_signature_parser.py b/tests/test_signature_parser.py
index .. 100644
--- a/tests/test_signature_parser.py
+++ b/tests/test_signature_parser.py
@@ -79,9 +79,9 @@ class StrategyGenerator(Parser):
         """
 
         if len(toks) == 5 and toks[1] == '{' and toks[4] == '}':
-            return strategies.dictionaries(keys=toks[2], values=toks[3])
+            return strategies.dictionaries(keys=toks[2], values=toks[3], max_size=20)
         elif len(toks) == 2:
-            return strategies.lists(elements=toks[1])
+            return strategies.lists(elements=toks[1], max_size=20)
         else: # pragma: no cover
             raise ValueError("unexpected tokens")

[py] select_quality: fix argument parsing. Instead of being parsed as a comma-separated list, the argument to --stream-prio was accidentally handled as a space-separated list.

diff --git a/lib/svtplay_dl/utils/__init__.py b/lib/svtplay_dl/utils/__init__.py
index .. 100644
--- a/lib/svtplay_dl/utils/__init__.py
+++ b/lib/svtplay_dl/utils/__init__.py
@@ -117,7 +117,9 @@ def select_quality(options, streams):
 
     # Extract protocol prio, in the form of "hls,hds,http,rtmp",
     # we want it as a list
-    proto_prio = (options.stream_prio or '').split() or None
+    proto_prio = None
+    if options.stream_prio:
+        proto_prio = options.stream_prio.split(',')
 
     return [x for
             x in prio_streams(streams, protocol_prio=proto_prio)

[py] More radix stuff. Not much idea where all the radixes come from (dean.edwards.name doesn't seem to make them?); probably will need to extract radix params from the actual code.

diff --git a/python/jsbeautifier/unpackers/packer.py b/python/jsbeautifier/unpackers/packer.py
index .. 100644
--- a/python/jsbeautifier/unpackers/packer.py
+++ b/python/jsbeautifier/unpackers/packer.py
@@ -80,6 +80,7 @@ class Unbaser(object):
     """Functor for a given base. Will efficiently convert
     strings to natural numbers."""
     ALPHABET = {
+        53 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ',
         59 : '0123456789abcdefghijklmnopqrstuvwABCDEFGHIJKLMNOPQRSTUVWXYZ',
         62 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',
         95 : (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'

[py] Removing errant requirements in setup.py

diff --git a/setup.py b/setup.py
index .. 100755
--- a/setup.py
+++ b/setup.py
@@ -17,6 +17,5 @@ setup(
     # Any requirements here, e.g. "Django >= 1.1.1"
     install_requires=[
         'django',
-        'django-adminlte2'
     ],
 )
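The svtplay_dl fix above hinges on str.split's default behavior: with no argument it splits on whitespace only, so a comma-separated value comes back as a single element. A quick demonstration:

    >>> 'hls,hds,http,rtmp'.split()
    ['hls,hds,http,rtmp']
    >>> 'hls,hds,http,rtmp'.split(',')
    ['hls', 'hds', 'http', 'rtmp']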
\"Django >= 1.1.1\"\n install_requires=[\n 'django',\n- 'django-adminlte2'\n ],\n )"},"message":{"kind":"string","value":"Removing errant requirements in setup.py"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1420,"cells":{"diff":{"kind":"string","value":"diff --git a/riak/test_server.py b/riak/test_server.py\nindex .. 100644\n--- a/riak/test_server.py\n+++ b/riak/test_server.py\n@@ -125,6 +125,7 @@ class TestServer(object):\n \n def prepare(self):\n if not self._prepared:\n+ self.touch_ssl_distribution_args()\n self.create_temp_directories()\n self._riak_script = os.path.join(self._temp_bin, \"riak\")\n self.write_riak_script()\n@@ -243,6 +244,14 @@ class TestServer(object):\n app_config.write(erlang_config(self.app_config))\n app_config.write(\".\")\n \n+ def touch_ssl_distribution_args(self):\n+ # To make sure that the ssl_distribution.args file is present,\n+ # the control script in the source node has to have been run at\n+ # least once. Running the `chkconfig` command is innocuous\n+ # enough to accomplish this without other side-effects.\n+ script = os.path.join(self.bin_dir, \"riak\")\n+ Popen([script, \"chkconfig\"]).wait()\n+\n def _kv_backend(self):\n return self.app_config[\"riak_kv\"][\"storage_backend\"]"},"message":{"kind":"string","value":"Touch the ssl_distribution.args_file by running `riak chkconfig`. Closes #."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1421,"cells":{"diff":{"kind":"string","value":"diff --git a/harvesters/core.py b/harvesters/core.py\nindex .. 100755\n--- a/harvesters/core.py\n+++ b/harvesters/core.py\n@@ -327,6 +327,7 @@ class Component2D(ComponentBase):\n #\n if self._part:\n count = self._part.data_size\n+ count //= component_per_bytes\n data_offset = self._part.data_offset\n else:\n count = self.width * self.height\n@@ -336,7 +337,7 @@ class Component2D(ComponentBase):\n # Convert the Python's built-in bytes array to a Numpy array.\n self._data = np.frombuffer(\n self._buffer.raw_buffer,\n- count=count // component_per_bytes,\n+ count=count,\n dtype=dtype,\n offset=data_offset\n )"},"message":{"kind":"string","value":"Resolve issue #"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1422,"cells":{"diff":{"kind":"string","value":"diff --git a/command/build_ext.py b/command/build_ext.py\nindex .. 100644\n--- a/command/build_ext.py\n+++ b/command/build_ext.py\n@@ -146,8 +146,7 @@ class build_ext (Command):\n \n # Setup the CCompiler object that we'll use to do all the\n # compiling and linking\n- self.compiler = new_compiler (plat=os.environ.get ('PLAT'),\n- verbose=self.verbose,\n+ self.compiler = new_compiler (verbose=self.verbose,\n dry_run=self.dry_run,\n force=self.force)\n if self.include_dirs is not None:"},"message":{"kind":"string","value":"Took out what looks like old debugging code that probably should never have been checked in: was passing the PLAT environment variable as the 'plat' argument to 'new_compiler()'."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1423,"cells":{"diff":{"kind":"string","value":"diff --git a/jose/jwt.py b/jose/jwt.py\nindex .. 
[py] importXML can raise all sorts of exceptions apart from TTLibError

diff --git a/Lib/extractor/formats/ttx.py b/Lib/extractor/formats/ttx.py
index .. 100644
--- a/Lib/extractor/formats/ttx.py
+++ b/Lib/extractor/formats/ttx.py
@@ -1,12 +1,12 @@
 from extractor.formats.opentype import extractOpenTypeInfo, extractOpenTypeGlyphs, extractOpenTypeKerning
 
 def isTTX(pathOrFile):
-    from fontTools.ttLib import TTFont, TTLibError
+    from fontTools.ttLib import TTFont
     try:
         font = TTFont()
         font.importXML(pathOrFile)
         del font
-    except TTLibError:
+    except Exception:
         return False
     return True

[py] Only send new m.direct account data if something changed

diff --git a/mautrix/bridge/user.py b/mautrix/bridge/user.py
index .. 100644
--- a/mautrix/bridge/user.py
+++ b/mautrix/bridge/user.py
@@ -84,11 +84,14 @@ class BaseUser(ABC):
             current_dms = {}
         if replace:
             # Filter away all existing DM statuses with bridge users
-            current_dms = {user: rooms for user, rooms in current_dms.items()
-                           if not self.bridge.is_bridge_ghost(user)}
+            filtered_dms = {user: rooms for user, rooms in current_dms.items()
+                            if not self.bridge.is_bridge_ghost(user)}
+        else:
+            filtered_dms = current_dms
         # Add DM statuses for all rooms in our database
-        current_dms.update(dms)
-        await puppet.intent.set_account_data(EventType.DIRECT, current_dms)
+        new_dms = {**filtered_dms, **dms}
+        if current_dms != new_dms:
+            await puppet.intent.set_account_data(EventType.DIRECT, new_dms)
 
     def _track_metric(self, metric: Gauge, value: bool) -> None:
         if self._metric_value[metric] != value:

[py] catch 'OK still here' (_now_ I'm done). When a change comes in, we send 'DONE', but sometimes more messages or the heartbeat come through right between those statements; then we'll read again so as not to fill up the logs with unnecessary 'recoverable errors'.

diff --git a/py3status/modules/imap.py b/py3status/modules/imap.py
index .. 100644
--- a/py3status/modules/imap.py
+++ b/py3status/modules/imap.py
@@ -189,6 +189,9 @@ class Py3status:
             socket.write(b'DONE\r\n')  # important!
             response = socket.read(4096).decode(encoding='ascii')
             expected_response = (command_tag + b' OK Idle completed').decode(encoding='ascii')
+            if response.lower().startswith('* '.lower()):  # '* OK Still here', mostly
+                # sometimes, more messages come in between reading and DONEing; so read them again
+                response = socket.read(4096).decode(encoding='ascii')
             if not response.lower().startswith(expected_response.lower()):
                 raise imaplib.IMAP4.abort("While terminating IDLE: " + response)
100644\n--- a/py3status/modules/imap.py\n+++ b/py3status/modules/imap.py\n@@ -189,6 +189,9 @@ class Py3status:\n socket.write(b'DONE\\r\\n') # important!\n response = socket.read(4096).decode(encoding='ascii')\n expected_response = (command_tag + b' OK Idle completed').decode(encoding='ascii')\n+ if response.lower().startswith('* '.lower()): # '* OK Still here', mostly\n+ # sometimes, more messages come in between reading and DONEing; so read them again\n+ response = socket.read(4096).decode(encoding='ascii')\n if not response.lower().startswith(expected_response.lower()):\n raise imaplib.IMAP4.abort(\"While terminating IDLE: \" + response)"},"message":{"kind":"string","value":"catch 'OK still there' (_now_ i'm done) when a change comes in, we send 'DONE', but sometimes more messages or the heartbeat come through right between those statements. then, we'll read again as to not fill up the logs with unnecessary 'recoverable errors'"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1427,"cells":{"diff":{"kind":"string","value":"diff --git a/photutils/aperture_core.py b/photutils/aperture_core.py\nindex .. 100644\n--- a/photutils/aperture_core.py\n+++ b/photutils/aperture_core.py\n@@ -25,6 +25,7 @@ from .utils.wcs_helpers import (skycoord_to_pixel_scale_angle, assert_angle,\n from astropy import __version__ as astropy_version\n if version.LooseVersion(astropy_version) > version.LooseVersion('1.0'):\n from astropy.wcs.utils import skycoord_to_pixel\n+ from astropy.nddata import support_nddata\n skycoord_to_pixel_mode = 'all'\n else:\n from .extern.wcs_utils import skycoord_to_pixel\n@@ -1147,6 +1148,7 @@ class RectangularAnnulus(PixelAperture):\n return flux\n \n \n+@support_nddata\n def aperture_photometry(data, apertures, unit=None, wcs=None, error=None,\n effective_gain=None, mask=None, method='exact',\n subpixels=5, pixelwise_error=True):"},"message":{"kind":"string","value":"decorating aperture_photometry() with support_nddata"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1428,"cells":{"diff":{"kind":"string","value":"diff --git a/msvc9compiler.py b/msvc9compiler.py\nindex .. 100644\n--- a/msvc9compiler.py\n+++ b/msvc9compiler.py\n@@ -292,7 +292,6 @@ def query_vcvarsall(version, arch=\"x86\"):\n result[key] = removeDuplicates(value)\n \n finally:\n- popen.stdin.close()\n popen.stdout.close()\n popen.stderr.close()"},"message":{"kind":"string","value":"Merged revisions via svnmerge from svn+ssh:///python/branches/py3k ........ r | eric.araujo | -- :: (ven., nov. ) | 2 lines And now for something completely different: Finish fixing # again. ........"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1429,"cells":{"diff":{"kind":"string","value":"diff --git a/openquake/hazardlib/shakemap.py b/openquake/hazardlib/shakemap.py\nindex .. 
100644\n--- a/openquake/hazardlib/shakemap.py\n+++ b/openquake/hazardlib/shakemap.py\n@@ -195,11 +195,9 @@ def amplify_gmfs(imts, vs30s, gmfs):\n Amplify the ground shaking depending on the vs30s\n \"\"\"\n n = len(vs30s)\n- for i, im in enumerate(imts):\n- for iloc in range(n):\n- gmfs[i * n + iloc] = amplify_ground_shaking(\n- im.period, vs30s[iloc], gmfs[i * n + iloc])\n- return gmfs\n+ out = [amplify_ground_shaking(im.period, vs30s[i], gmfs[m * n + i])\n+ for m, im in enumerate(imts) for i in range(n)]\n+ return numpy.array(out)\n \n \n def amplify_ground_shaking(T, vs30, gmvs):"},"message":{"kind":"string","value":"Minor refactoring Former-commit-id: ea3befe7cedcba8dcd1fc9df"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1430,"cells":{"diff":{"kind":"string","value":"diff --git a/sos/plugins/ipmitool.py b/sos/plugins/ipmitool.py\nindex .. 100644\n--- a/sos/plugins/ipmitool.py\n+++ b/sos/plugins/ipmitool.py\n@@ -26,14 +26,22 @@ class IpmiTool(Plugin, RedHatPlugin, DebianPlugin):\n packages = ('ipmitool',)\n \n def setup(self):\n+ result = self.get_command_output(\"ipmitool -I usb mc info\")\n+ have_usbintf = result['status']\n+\n+ if not have_usbintf:\n+ cmd = \"ipmitool -I usb\"\n+ else:\n+ cmd = \"ipmitool\"\n+\n self.add_cmd_output([\n- \"ipmitool sel info\",\n- \"ipmitool sel list\",\n- \"ipmitool sensor list\",\n- \"ipmitool chassis status\",\n- \"ipmitool fru print\",\n- \"ipmitool mc info\",\n- \"ipmitool sdr info\"\n+ \"%s sel info\" % cmd,\n+ \"%s sel list\" % cmd,\n+ \"%s sensor list\" % cmd,\n+ \"%s chassis status\" % cmd,\n+ \"%s fru print\" % cmd,\n+ \"%s mc info\" % cmd,\n+ \"%s sdr info\" % cmd\n ])\n \n # vim: set et ts=4 sw=4 :"},"message":{"kind":"string","value":"[ipmitool] use usb interface if available SOSREPORT generally uses the default interface (/dev/ipmi0) while executing ipmitool command, which is quite slow as compare to usb interface. This usb interface uses the virtual device exposed from BMC like in OpenPower system AMI exposes virtual USB interface. IPMITOOL command with usb interface sends the data to the kernel drivers which inturn sends it to the BMC. This patch enables ipmitool command to use usb interface if available. With this"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1431,"cells":{"diff":{"kind":"string","value":"diff --git a/openid/interface.py b/openid/interface.py\nindex .. 100644\n--- a/openid/interface.py\n+++ b/openid/interface.py\n@@ -60,7 +60,7 @@ class ValidLogin(ConsumerResponse):\n if ret is None:\n return False\n \n- return ret[1] == server_id\n+ return ret[1] == self.identity\n \n class InvalidLogin(ConsumerResponse):\n \"\"\"This subclass is used when the login wasn't valid.\"\"\""},"message":{"kind":"string","value":"[project @ Bug fix in consumer library]"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1432,"cells":{"diff":{"kind":"string","value":"diff --git a/future/tests/test_int.py b/future/tests/test_int.py\nindex .. 
[py] Test that we can extract data by following a ForeignKey relation.

diff --git a/tests/test_views.py b/tests/test_views.py
index .. 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -67,6 +67,26 @@ class GenerateDataTests(TestCase):
         actual_list = self.mixin.generate_data(values_list_queryset, fields)
         assert list(actual_list) == list(expected_list)
 
+    def test_follows_foreign_key_with_model_queryset(self):
+        fields = ('title', 'author__name')
+        queryset = MockModel.objects.all()
+        expected_list = [
+            (self.mock.title, self.author.name),
+            (self.mock2.title, self.author.name),
+        ]
+        actual_list = self.mixin.generate_data(queryset, fields)
+        assert list(actual_list) == list(expected_list)
+
+    def test_follows_foreign_key_with_values_list_queryset(self):
+        fields = ('title', 'author__name')
+        values_list_queryset = MockModel.objects.all().values_list()
+        expected_list = [
+            (self.mock.title, self.author.name),
+            (self.mock2.title, self.author.name),
+        ]
+        actual_list = self.mixin.generate_data(values_list_queryset, fields)
+        assert list(actual_list) == list(expected_list)
+
     def test_reverse_ordering_when_fields_specified(self):
         fields = ('title', 'id')
         actual_list = self.mixin.generate_data(self.queryset, fields)

[py] [Obj-C] removed transport client import.

diff --git a/stone/target/obj_c_types.py b/stone/target/obj_c_types.py
index .. 100644
--- a/stone/target/obj_c_types.py
+++ b/stone/target/obj_c_types.py
@@ -995,7 +995,6 @@ class ObjCTypesGenerator(ObjCBaseGenerator):
         import_classes = [
             fmt_routes_class(namespace.name),
             fmt_route_obj_class(namespace.name),
-            'DBTransportClient',
             'DBStoneBase',
             'DBRequestErrors',
         ]

[py] tests/wkt_test: Added negative test values to test_loads_linestring_2d.

diff --git a/geomet/tests/wkt_test.py b/geomet/tests/wkt_test.py
index .. 100644
--- a/geomet/tests/wkt_test.py
+++ b/geomet/tests/wkt_test.py
@@ -112,10 +112,10 @@ class LineStringTestCase(unittest.TestCase):
         self.assertEqual(expected, wkt.dumps(ls, decimals=3))
 
     def test_loads_linestring_2d(self):
-        ls = 'LINESTRING (0 1, 2 3, 4 5)'
-        expected = dict(type='LineString', coordinates=[[0.0, 1.0],
-                                                        [2.0, 3.0],
-                                                        [4.0, 5.0]])
+        ls = 'LINESTRING (0 -1, -2 -3, -4 5)'
+        expected = dict(type='LineString', coordinates=[[0.0, -1.0],
+                                                        [-2.0, -3.0],
+                                                        [-4.0, 5.0]])
         self.assertEqual(expected, wkt.loads(ls))
 
     def test_loads_linestring_3d(self):
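The Django tests above rely on double-underscore lookups following a ForeignKey inside the field list; a sketch assuming the tests' MockModel with an `author` ForeignKey:

    # each row pairs the model's own field with the related model's field
    rows = MockModel.objects.values_list('title', 'author__name')
    # -> e.g. [('first title', 'Alice'), ('second title', 'Alice')]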
[py] Collect runtime stats in vprof process.

diff --git a/vprof/runtime_profile.py b/vprof/runtime_profile.py
index .. 100644
--- a/vprof/runtime_profile.py
+++ b/vprof/runtime_profile.py
@@ -80,3 +80,9 @@ class RuntimeProfile(base_profile.BaseProfile):
             'totalCalls': cprofile_stats.total_calls,
             'callStats': self._transform_stats(cprofile_stats)
         }
+
+    def run(self):
+        """Runs profile and returns collected stats."""
+        runtime_stats = {}
+        self.collect_stats(runtime_stats)
+        return runtime_stats

[py] change decimal places for doping tests

diff --git a/tests/test_generators.py b/tests/test_generators.py
index .. 100644
--- a/tests/test_generators.py
+++ b/tests/test_generators.py
@@ -318,10 +318,10 @@ class TestDoped(unittest.TestCase):
         bqm = dimod.generators.random.doped(0.3, 100, seed=506)
         total = len(bqm.quadratic)
         afm = sum([val == 1 for val in bqm.quadratic.values()])
-        self.assertAlmostEqual(afm / total, 0.3)
+        self.assertAlmostEqual(afm / total, 0.3, places=2)
 
     def test_correct_ratio_fm(self):
         bqm = dimod.generators.random.doped(0.3, 100, seed=506, fm=False)
         total = len(bqm.quadratic)
         fm = sum([val == -1 for val in bqm.quadratic.values()])
-        self.assertAlmostEqual(fm / total, 0.3)
+        self.assertAlmostEqual(fm / total, 0.3, places=2)

[py] Update setup.py (#)

diff --git a/azure-sdk-testutils/setup.py b/azure-sdk-testutils/setup.py
index .. 100644
--- a/azure-sdk-testutils/setup.py
+++ b/azure-sdk-testutils/setup.py
@@ -11,4 +11,5 @@ setup(
     author_email='azpysdkhelp@microsoft.com',
     url='https://github.com/Azure/azure-sdk-for-python',
     packages=find_packages(),
+    long_description="Specific tools for Azure SDK for Python testing"
 )

[py] try to use field.remote_field instead of field.related (#)

diff --git a/rest_framework_json_api/utils.py b/rest_framework_json_api/utils.py
index .. 100644
--- a/rest_framework_json_api/utils.py
+++ b/rest_framework_json_api/utils.py
@@ -191,7 +191,10 @@ def get_related_resource_type(relation):
             # Django 1.7
             relation_model = parent_model_relation.related.model
         elif hasattr(parent_model_relation, 'field'):
-            relation_model = parent_model_relation.field.related.model
+            try:
+                relation_model = parent_model_relation.field.remote_field.model
+            except AttributeError:
+                relation_model = parent_model_relation.field.related.model
         else:
             return get_related_resource_type(parent_model_relation)
     return get_resource_type_from_model(relation_model)
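The rest_framework_json_api patch above is the usual feature-detection dance for cross-version Django support: try the newer attribute and fall back on AttributeError. Extracted as a helper (a sketch, not the library's API):

    def related_model(field):
        try:
            return field.remote_field.model  # newer Django releases
        except AttributeError:
            return field.related.model       # older releases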
[py] Updated python api again

diff --git a/python/rosette/api.py b/python/rosette/api.py
index .. 100644
--- a/python/rosette/api.py
+++ b/python/rosette/api.py
@@ -111,16 +111,16 @@ def _retrying_request(op, url, data, headers):
	    if status < 500:
		if not REUSE_CONNECTION:
		    HTTP_CONNECTION.close()
-		    return rdata, status
-		if rdata is not None:
-		    try:
-			the_json = _my_loads(rdata)
-			if "message" in the_json:
-			    message = the_json["message"]
-			if "code" in the_json:
-			    code = the_json["code"]
-			except:
-			    pass
+		return rdata, status
+	    if rdata is not None:
+		try:
+		    the_json = _my_loads(rdata)
+		    if "message" in the_json:
+			message = the_json["message"]
+		    if "code" in the_json:
+			code = the_json["code"]
+		except:
+		    pass
	# If there are issues connecting to the API server,
	# try to regenerate the connection as long as there are
	# still retries left.

[py] Enhance logging for recalculation

diff --git a/autotweet/learning.py b/autotweet/learning.py
index .. 100644
--- a/autotweet/learning.py
+++ b/autotweet/learning.py
@@ -149,9 +149,12 @@ class DataCollection(object):
         """
 
         if not grams:
-            grams = session.query(Gram).all()
+            grams = session.query(Gram)
         for gram in grams:
+            orig_idf = gram.idf
             gram.idf = self._get_idf(session, gram)
+            logger.debug('Recalculating {} {} -> {}'.format(
+                gram.gram, orig_idf, gram.idf))
 
     def get_count(self):
         """Get count of :class:`Document`.

[py] api2: support canteens near filtering

diff --git a/api2.py b/api2.py
index .. 100644
--- a/api2.py
+++ b/api2.py
@@ -23,11 +23,17 @@ class Canteen(Api2Entity):
			self.fromJsonDict(values)
 
	@staticmethod
-	def find(ids=None):
+	def find(ids=None, near=None):
+		recvCanteens = lambda **kwargs: list(map(lambda c: Canteen(values=c),
+			Canteen().request('canteens', params=kwargs)))
		if ids:
			ids = ','.join(map(lambda i: str(i), ids))
-			cs = Canteen().request('canteens', params = { 'ids': ids })
-			return list(map(lambda c: Canteen(values=c), cs))
+			return recvCanteens(ids=ids)
+		if near is not None:
+			params = { 'near[lat]': near[0], 'near[lng]': near[1] }
+			if len(near) > 2:
+				params['near[dist]'] = near[2]
+			return recvCanteens(**params)
		raise NotImplemented
 
	def __str__(self):
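Usage of the new near filter added above might look like this (coordinates invented for illustration; the optional third tuple element is the distance, per the patch):

    nearby = Canteen.find(near=(52.52, 13.405))      # lat, lng
    closer = Canteen.find(near=(52.52, 13.405, 10))  # lat, lng, dist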
[py] Prevent inheriting file descriptors to the benchmarked tool. This is the default since Python , but we want it always.

diff --git a/benchexec/runexecutor.py b/benchexec/runexecutor.py
index .. 100644
--- a/benchexec/runexecutor.py
+++ b/benchexec/runexecutor.py
@@ -364,6 +364,7 @@ class RunExecutor():
                             stdin=DEVNULL,
                             stdout=outputFile, stderr=outputFile,
                             env=runningEnv, cwd=workingDir,
+                            close_fds=True,
                             preexec_fn=preSubprocess)
 
         except OSError as e:

[py] tools/dfu.py: Pad image data to 8 byte alignment to support L.

diff --git a/tools/dfu.py b/tools/dfu.py
index .. 100755
--- a/tools/dfu.py
+++ b/tools/dfu.py
@@ -60,6 +60,10 @@ def build(file,targets,device=DEFAULT_DEVICE):
     for t,target in enumerate(targets):
         tdata = b''
         for image in target:
+            # pad image to 8 bytes (needed at least for L476)
+            pad = (8 - len(image['data']) % 8 ) % 8
+            image['data'] = image['data'] + bytes(bytearray(8)[0:pad])
+            #
             tdata += struct.pack('<2I',image['address'],len(image['data']))+image['data']
         tdata = struct.pack('<6sBI255s2I',b'Target',0,1, b'ST...',len(tdata),len(target)) + tdata
         data += tdata

[py] added '# coding: utf-8' to the top of the file, since a docstring was raising a SyntaxError; preparing to change the Raster.bounds call from transform=transform to transform=affine (transform is a list, affine an Affine object)

diff --git a/sat_image/image.py b/sat_image/image.py
index .. 100644
--- a/sat_image/image.py
+++ b/sat_image/image.py
@@ -1,3 +1,4 @@
+# coding: utf-8
 # =============================================================================================
 # Copyright 2017 dgketchum
 #

[py] Avoid timestamping symlinks, which can be broken

diff --git a/proj/__init__.py b/proj/__init__.py
index .. 100755
--- a/proj/__init__.py
+++ b/proj/__init__.py
@@ -63,7 +63,10 @@ def archive(folder, dry_run=False):
 
 def _last_modified(folder):
     try:
-        return max(_time_modified(f) for f in _iter_files(folder))
+        return max(
+            _time_modified(f) for f in _iter_files(folder) if not os.path.islink(f)
+        )
+
     except ValueError:
         bail("no files in folder: " + folder)
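The padding formula in the dfu.py change above, `(8 - len(data) % 8) % 8`, yields 0 for already-aligned lengths thanks to the outer modulo. A few worked values:

    for n in (0, 5, 8, 13):
        print(n, (8 - n % 8) % 8)   # 0 -> 0, 5 -> 3, 8 -> 0, 13 -> 3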
[py] fix sitetreeload management command for py and django

diff --git a/sitetree/management/commands/sitetreeload.py b/sitetree/management/commands/sitetreeload.py
index .. 100644
--- a/sitetree/management/commands/sitetreeload.py
+++ b/sitetree/management/commands/sitetreeload.py
@@ -79,8 +79,14 @@ class Command(BaseCommand):
         tree_item_parents = defaultdict(list)
         tree_items_new_indexes = {}
 
+        try:
+            allow_migrate = router.allow_migrate
+        except AttributeError:
+            # Django < 1.7
+            allow_migrate = router.allow_syncdb
+
         for obj in objects:
-            if router.allow_syncdb(using, obj.object.__class__):
+            if allow_migrate(using, obj.object.__class__):
                 if isinstance(obj.object, (MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS)):
                     if isinstance(obj.object, MODEL_TREE_CLASS):
                         trees.append(obj.object)

[py] Pull in pexpect issue# See discussion in

diff --git a/sh.py b/sh.py
index .. 100644
--- a/sh.py
+++ b/sh.py
@@ -987,8 +987,6 @@ class OProc(object):
         def setwinsize(fd):
             rows, cols = OProc._default_window_size
             TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561)
-            if TIOCSWINSZ == 2148037735:  # L is not required in Python >= 2.2.
-                TIOCSWINSZ = -2146929561  # Same bits, but with sign.
 
             s = struct.pack('HHHH', rows, cols, 0, 0)
             fcntl.ioctl(fd, TIOCSWINSZ, s)

[py] bug: raise exception instead of silently ignoring unnecessary args

diff --git a/web3/contract.py b/web3/contract.py
index .. 100644
--- a/web3/contract.py
+++ b/web3/contract.py
@@ -474,6 +474,10 @@ class Contract:
                 encode_abi(cls.web3, constructor_abi, arguments, data=cls.bytecode)
             )
         else:
+            if args is not None or kwargs is not None:
+                msg = "Constructor args were provided, but no constructor function was provided."
+                raise TypeError(msg)
+
             deploy_data = to_hex(cls.bytecode)
 
         return deploy_data

[py] avoid opening file again with rasterio

diff --git a/mapchete/formats/default/raster_file.py b/mapchete/formats/default/raster_file.py
index .. 100644
--- a/mapchete/formats/default/raster_file.py
+++ b/mapchete/formats/default/raster_file.py
@@ -114,7 +114,8 @@ class InputData(base.InputData):
             )
             # If soucre and target CRSes differ, segmentize and reproject
             if inp_crs != out_crs:
-                segmentize = _get_segmentize_value(self.path, self.pyramid)
+                # estimate segmentize value (raster pixel size * tile size)
+                segmentize = inp.transform[0] * self.pyramid.tile_size
                 ogr_bbox = ogr.CreateGeometryFromWkb(bbox.wkb)
                 ogr_bbox.Segmentize(segmentize)
                 self._bbox_cache[str(out_crs)] = reproject_geometry(
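The segmentize estimate in the mapchete change above is just raster pixel size (the first coefficient of the affine transform) times tile size; illustrative numbers, not values from the project:

    pixel_size = 0.01                      # inp.transform[0], in CRS units (example value)
    tile_size = 256
    segmentize = pixel_size * tile_size    # 2.56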
[py] it is now a module, not a whole package

diff --git a/python/setup.py b/python/setup.py
index .. 100644
--- a/python/setup.py
+++ b/python/setup.py
@@ -14,7 +14,7 @@ setup(
     author_email="jbenet@cs.stanford.com",
     url="http://github.com/jbenet/nanotime/tree/master/python",
     keywords=["nanotime", "nanosecond", "time precision", "64bit time"],
-    packages=["nanotime"],
+    modules=["nanotime"],
     install_requires=[],
     license="MIT License"
 )

[py] Force seed to be an int.

diff --git a/vel/launcher.py b/vel/launcher.py
index .. 100644
--- a/vel/launcher.py
+++ b/vel/launcher.py
@@ -16,7 +16,7 @@ def main():
     parser.add_argument('varargs', nargs='*', metavar='VARARGS', help='Extra options to the command')
     parser.add_argument('-r', '--run_number', default=0, help="A run number")
     parser.add_argument('-d', '--device', default='cuda', help="A device to run the model on")
-    parser.add_argument('-s', '--seed', default=dtm.date.today().year, help="Random seed for the project")
+    parser.add_argument('-s', '--seed', type=int, default=dtm.date.today().year, help="Random seed for the project")
     parser.add_argument('--reset', action='store_true', default=False, help="Overwrite existing model storage")
 
     args = parser.parse_args()

[py] added simple docstrings to setter functions

diff --git a/lenses/setter.py b/lenses/setter.py
index .. 100644
--- a/lenses/setter.py
+++ b/lenses/setter.py
@@ -4,6 +4,12 @@ import copy
 
 @singledispatch
 def setitem_immutable(self, key, value):
+    '''Takes an object, a key, and a value and produces a new object
+    that is a copy of the original but with `value` as the new value of
+    `key`.
+
+    setitem_immutable(obj, key, obj[key]) == obj
+    '''
     try:
         self._lens_setitem
     except AttributeError:
@@ -22,6 +28,12 @@ def _tuple_setitem_immutable(self, key, value):
 
 @singledispatch
 def setattr_immutable(self, name, value):
+    '''Takes an object, a string, and a value and produces a new object
+    that is a copy of the original but with the attribute called `name`
+    set to `value`.
+
+    setattr_immutable(obj, 'attr', obj.attr) == obj
+    '''
     try:
         self._lens_setattr
     except AttributeError:

[py] Removing XMPPAccount password changing during change_password

diff --git a/conversejs/xmpp.py b/conversejs/xmpp.py
index .. 100644
--- a/conversejs/xmpp.py
+++ b/conversejs/xmpp.py
@@ -212,7 +212,4 @@ def change_password(xmpp_account, new_password):
         logger.error('Unable to connect to XMPP server.')
         return False
 
-    xmpp_account.password = new_password
-    xmpp_account.save()
-
     return True
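The vel fix above works because argparse applies `type` to values parsed from the command line, so the seed arrives as an int instead of a string. A quick check (default value invented):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--seed', type=int, default=2018)
    args = parser.parse_args(['-s', '7'])
    assert args.seed == 7 and isinstance(args.seed, int)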
[py] Temperature now propagates through resample_model calls

diff --git a/models.py b/models.py
index .. 100644
--- a/models.py
+++ b/models.py
@@ -229,7 +229,7 @@ class MixtureDistribution(Mixture, GibbsSampling, Distribution):
     This makes a Mixture act like a Distribution for use in other compound models
     '''
 
-    def resample(self,data,niter=25):
+    def resample(self,data,niter=25,temp=None):
         # doesn't keep a reference to the data like a model would
         assert isinstance(data,list) or isinstance(data,np.ndarray)
 
@@ -240,11 +240,11 @@ class MixtureDistribution(Mixture, GibbsSampling, Distribution):
             self.add_data(data)
 
             for itr in range(niter):
-                self.resample_model()
+                self.resample_model(temp=temp)
 
             self.labels_list.pop()
         else:
-            self.resample_model()
+            self.resample_model(temp=temp)
 
     def max_likelihood(self,data,weights=None):
         if weights is not None:
@@ -313,9 +313,9 @@ class FrozenMixtureDistribution(MixtureDistribution):
                 weights=self.weights,
                 likelihoods=self._likelihoods))
 
-    def resample_model(self):
+    def resample_model(self, temp=None):
         for l in self.labels_list:
-            l.resample()
+            l.resample(temp=temp)
         self.weights.resample([l.z for l in self.labels_list])
 
     def log_likelihood(self,x):

[py] reporters: Support None subject in BuildStatusGeneratorMixin

diff --git a/master/buildbot/reporters/generators/utils.py b/master/buildbot/reporters/generators/utils.py
index .. 100644
--- a/master/buildbot/reporters/generators/utils.py
+++ b/master/buildbot/reporters/generators/utils.py
@@ -47,7 +47,7 @@ class BuildStatusGeneratorMixin(util.ComparableMixin):
     def check(self):
         self._verify_build_generator_mode(self.mode)
 
-        if '\n' in self.subject:
+        if self.subject is not None and '\n' in self.subject:
             config.error('Newlines are not allowed in message subjects')
 
         list_or_none_params = [
@@ -165,7 +165,7 @@ class BuildStatusGeneratorMixin(util.ComparableMixin):
             if buildmsg['subject'] is not None:
                 subject = buildmsg['subject']
 
-        if subject is None:
+        if subject is None and self.subject is not None:
             subject = self.subject % {'result': statusToString(results),
                                       'projectName': master.config.title,
                                       'title': master.config.title,

[py] removed 'seed' param from conns in network.py (bug)

diff --git a/netpyne/network.py b/netpyne/network.py
index .. 100644
--- a/netpyne/network.py
+++ b/netpyne/network.py
@@ -140,8 +140,7 @@ class Network (object):
             if 'sec' not in connParam: connParam['sec'] = None  # if section not specified, make None (will be assigned to first section in cell)
             if 'synMech' not in connParam: connParam['synMech'] = None  # if synaptic mechanism not specified, make None (will be assigned to first synaptic mechanism in cell)
             if 'threshold' not in connParam: connParam['threshold'] = self.params['defaultThreshold']  # if no threshold specified, make None (will be assigned default value)
-            if 'seed' not in connParam: connParam['threshold'] = self.params['defaultThreshold']  # if no threshold specified, make None (will be assigned default value)
-
+ 
             if 'weight' not in connParam: connParam['weight'] = self.params['defaultWeight']  # if no weight, set default
             if 'delay' not in connParam: connParam['delay'] = self.params['defaultDelay']  # if no delay, set default
             if 'synsPerConn' not in connParam: connParam['synsPerConn'] = 1  # if no delay, set default
[py] update docstring to make it clear separator is used with the keys argument

diff --git a/consul/base.py b/consul/base.py
index .. 100644
--- a/consul/base.py
+++ b/consul/base.py
@@ -233,9 +233,8 @@ class Consul(object):
         *token* is an optional `ACL token`_ to apply to this request.
 
         *keys* is a boolean which, if True, says to return a flat list of
-        keys without values or other metadata.
-
-        *separator* is used to list only up to a given separator character.
+        keys without values or other metadata. *separator* can be used
+        with *keys* to list keys only up to a given separator character.
 
         *dc* is the optional datacenter that you wish to communicate with.
         If None is provided, defaults to the agent's datacenter.

[py] Bug - Send the unstructured log size to New Relic as ints. New Relic Insights doesn't coerce strings to integers, so doesn't allow the graphing of custom attributes sent as strings. HTTP headers are always exposed as strings, even for fields that are expected to represent numbers, so we must explicitly cast Content-Length.

diff --git a/treeherder/log_parser/artifactbuildercollection.py b/treeherder/log_parser/artifactbuildercollection.py
index .. 100644
--- a/treeherder/log_parser/artifactbuildercollection.py
+++ b/treeherder/log_parser/artifactbuildercollection.py
@@ -91,7 +91,7 @@ BuildbotPerformanceDataArtifactBuilder
     # Temporary annotation of log size to help set thresholds in bug 1295997.
     newrelic.agent.add_custom_parameter(
         'unstructured_log_size',
-        response.headers.get('Content-Length', 'Unknown')
+        int(response.headers.get('Content-Length', -1))
     )
     newrelic.agent.add_custom_parameter(
         'unstructured_log_encoding',

[py] Add classifiers to the setup.py

diff --git a/setup.py b/setup.py
index .. 100755
--- a/setup.py
+++ b/setup.py
@@ -15,6 +15,21 @@ setup(name='salt',
       author='Thomas S Hatch',
       author_email='thatch45@gmail.com',
       url='https://github.com/thatch45/salt',
+      classifiers = [
+          'Programming Language :: Python',
+          'Programming Language :: Cython',
+          'Programming Language :: Python :: 2.6',
+          'Development Status :: 4 - Beta',
+          'Environment :: Console',
+          'Intended Audience :: Developers',
+          'Intended Audience :: Information Technology',
+          'Intended Audience :: System Administrators',
+          'License :: OSI Approved :: Apache Software License',
+          'Operating System :: POSIX :: Linux',
+          'Topic :: System :: Monitoring',
+          'Topic :: System :: Clustering',
+          'Topic :: System :: Distributed Computing',
+          ],
       packages=['salt',
                 'salt.modules',
                 'salt.cli',
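As the treeherder commit above notes, HTTP headers arrive as strings even for numeric fields, so graphing tools need an explicit cast; the cast-with-sentinel idiom:

    headers = {'Content-Length': '4096'}
    size = int(headers.get('Content-Length', -1))   # 4096, as an int
    missing = int({}.get('Content-Length', -1))     # -1 sentinel when the header is absent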
100755\n--- a/setup.py\n+++ b/setup.py\n@@ -15,6 +15,21 @@ setup(name='salt',\n author='Thomas S Hatch',\n author_email='thatch45@gmail.com',\n url='https://github.com/thatch45/salt',\n+ classifiers = [\n+ 'Programming Language :: Python',\n+ 'Programming Language :: Cython',\n+ 'Programming Language :: Python :: 2.6',\n+ 'Development Status :: 4 - Beta',\n+ 'Environment :: Console',\n+ 'Intended Audience :: Developers',\n+ 'Intended Audience :: Information Technology',\n+ 'Intended Audience :: System Administrators',\n+ 'License :: OSI Approved :: Apache Software License',\n+ 'Operating System :: POSIX :: Linux',\n+ 'Topic :: System :: Monitoring',\n+ 'Topic :: System :: Clustering',\n+ 'Topic :: System :: Distributed Computing',\n+ ],\n packages=['salt',\n 'salt.modules',\n 'salt.cli',"},"message":{"kind":"string","value":"Add classifiers to the setup.py"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1461,"cells":{"diff":{"kind":"string","value":"diff --git a/taskqueue/aws_queue_api.py b/taskqueue/aws_queue_api.py\nindex .. 100644\n--- a/taskqueue/aws_queue_api.py\n+++ b/taskqueue/aws_queue_api.py\n@@ -102,7 +102,7 @@ class AWSTaskQueueAPI(object):\n 'All'\n ],\n VisibilityTimeout=visibility_timeout,\n- WaitTimeSeconds=0,\n+ WaitTimeSeconds=20,\n )\n \n if 'Messages' not in resp:"},"message":{"kind":"string","value":"fix: use sec wait time to avoid polling 0s on tasks for AWS"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1462,"cells":{"diff":{"kind":"string","value":"diff --git a/hupper/winapi.py b/hupper/winapi.py\nindex .. 100644\n--- a/hupper/winapi.py\n+++ b/hupper/winapi.py\n@@ -13,8 +13,8 @@ class JobObjectInfoType(object):\n GroupInformation = 11\n \n \n-class JOBOBJECTLIMIT(object):\n- JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x2000\n+class JobObjectLimit(object):\n+ KILL_ON_JOB_CLOSE = 0x2000\n \n \n class IO_COUNTERS(ctypes.Structure):\n@@ -141,7 +141,7 @@ class ProcessGroup(object):\n self.h_job = CreateJobObject(None, None)\n \n info = JOBOBJECT_BASIC_LIMIT_INFORMATION()\n- info.LimitFlags = JOBOBJECTLIMIT.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE\n+ info.LimitFlags = JobObjectLimit.KILL_ON_JOB_CLOSE\n \n extended_info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION()\n extended_info.BasicLimitInformation = info\n@@ -149,7 +149,7 @@ class ProcessGroup(object):\n SetInformationJobObject(\n self.h_job,\n JobObjectInfoType.ExtendedLimitInformation,\n- extended_info,\n+ ctypes.pointer(extended_info),\n ctypes.sizeof(extended_info),\n )"},"message":{"kind":"string","value":"cleanup names and pass a pointer explicitly"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1463,"cells":{"diff":{"kind":"string","value":"diff --git a/ohapi/api.py b/ohapi/api.py\nindex .. 100644\n--- a/ohapi/api.py\n+++ b/ohapi/api.py\n@@ -326,6 +326,24 @@ def handle_error(r, expected_code):\n def upload_aws(target_filepath, metadata, access_token, base_url=OH_BASE_URL,\n remote_file_info=None, project_member_id=None,\n max_bytes=MAX_FILE_DEFAULT):\n+ \"\"\"\n+ Upload a file to AWS. 
To learn more about Open Humans OAuth2 projects, go\n+ to: https://www.openhumans.org/direct-sharing/oauth2-features/.\n+\n+ :param target_filepath: This field is the filepath of the file to be\n+ uploaded\n+ :param metadata: This field is a python dictionary with keys filename,\n+ description and tags for single user upload and filename,\n+ project member id, description and tags for multiple user upload.\n+ :param access_token: This is user specific access token/master token.\n+ :param base_url: It is this URL `https://www.openhumans.org`.\n+ :param remote_file_info: This field is for for checking if a file with\n+ matching name and file size already exists. Its default value is none.\n+ :param project_member_id: This field is the list of project member id of\n+ all members of a project. Its default value is None.\n+ :param max_bytes: This field is the maximum file size a user can upload.\n+ It's default value is 128m.\n+ \"\"\"\n if remote_file_info:\n filesize = os.stat(target_filepath).st_size\n if process_info(remote_file_info, filesize, target_filepath) is False:"},"message":{"kind":"string","value":"Added documentation for upload_aws (#) * project_member_id optional in upload function * resolving code climate issue * project_member_id optional in upload function * fix hound errors * Update api.py * optional project_member_id * Hound fixes and other refactoring * Hound fixes * Hound fixes * Hound fixes * Hound fixes * added tests for get_page * hound fix * added doc for upload_aws * minor fixes"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1464,"cells":{"diff":{"kind":"string","value":"diff --git a/alerta/top.py b/alerta/top.py\nindex .. 100644\n--- a/alerta/top.py\n+++ b/alerta/top.py\n@@ -1,4 +1,3 @@\n-#!/usr/bin/env python\n \n import sys\n import time\n@@ -389,7 +388,8 @@ class Screen(object):\n elif key in 'oO':\n self.dedup_by = \"origin\"\n elif key in 'qQ':\n- exit_handler()\n+ self._reset()\n+ sys.exit(0)\n \n # def _handle_movement_key(self, key):\n # # Highlight the corresponding node in the list\n@@ -419,7 +419,9 @@ class Screen(object):\n def exit_handler(signum, frame):\n \n logging.debug('Received Signal %s (%s)' % (signum, frame))\n- screen._reset()\n+ curses.echo()\n+ curses.nocbreak()\n+ curses.endwin()\n sys.exit(0)\n \n # Register exit signals"},"message":{"kind":"string","value":"make screen sane after exiting"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1465,"cells":{"diff":{"kind":"string","value":"diff --git a/devassistant/command_runners.py b/devassistant/command_runners.py\nindex .. 100644\n--- a/devassistant/command_runners.py\n+++ b/devassistant/command_runners.py\n@@ -594,7 +594,7 @@ class SCLCommandRunner(CommandRunner):\n @classmethod\n def run(cls, c):\n c.kwargs['__scls__'].append(c.comm_type.split()[1:])\n- retval = lang.run_section(c.comm,\n+ retval = lang.run_section(c.input_res,\n c.kwargs,\n runner=c.kwargs['__assistant__'])\n c.kwargs['__scls__'].pop()"},"message":{"kind":"string","value":"Fix SCLCommandRunner with new Yaml syntax"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1466,"cells":{"diff":{"kind":"string","value":"diff --git a/python/orca/src/bigdl/orca/common.py b/python/orca/src/bigdl/orca/common.py\nindex .. 
100644\n--- a/python/orca/src/bigdl/orca/common.py\n+++ b/python/orca/src/bigdl/orca/common.py\n@@ -155,7 +155,7 @@ def init_orca_context(cluster_mode=\"local\", cores=2, memory=\"2g\", num_nodes=1,\n executor_memory=memory, **spark_args)\n elif cluster_mode == \"standalone\":\n for key in [\"driver_cores\", \"driver_memory\", \"extra_executor_memory_for_ray\",\n- \"extra_python_lib\", \"jars\", \"master\"]:\n+ \"extra_python_lib\", \"jars\", \"master\", \"enable_numa_binding\"]:\n if key in kwargs:\n spark_args[key] = kwargs[key]\n from zoo import init_spark_standalone"},"message":{"kind":"string","value":"Support numa binding in init_spark_standalone (#) * support numa binding in init_spark_standalone * add doc and add to orca context * address comments * address comments * update scripts * hyperthreading * fix"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1467,"cells":{"diff":{"kind":"string","value":"diff --git a/spur/results.py b/spur/results.py\nindex .. 100644\n--- a/spur/results.py\n+++ b/spur/results.py\n@@ -1,8 +1,9 @@\n def result(return_code, output, stderr_output, allow_error=False):\n+ result = ExecutionResult(return_code, output, stderr_output)\n if allow_error or return_code == 0:\n- return ExecutionResult(return_code, output, stderr_output)\n+ return result\n else:\n- raise RunProcessError(return_code, output, stderr_output)\n+ raise result.to_error()\n \n \n class RunProcessError(RuntimeError):\n@@ -20,3 +21,10 @@ class ExecutionResult(object):\n self.return_code = return_code\n self.output = output\n self.stderr_output = stderr_output\n+ \n+ def to_error(self):\n+ return RunProcessError(\n+ self.return_code,\n+ self.output, \n+ self.stderr_output\n+ )"},"message":{"kind":"string","value":"Move logic for creating RunProcessError to ExecutionResult.to_error"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1468,"cells":{"diff":{"kind":"string","value":"diff --git a/digsandpaper/search_server.py b/digsandpaper/search_server.py\nindex .. 100644\n--- a/digsandpaper/search_server.py\n+++ b/digsandpaper/search_server.py\n@@ -111,10 +111,10 @@ def _index_fields(request):\n if (request.headers['Content-Type'] == 'application/x-gzip'):\n gz_data_as_file = StringIO.StringIO(request.data)\n uncompressed = gzip.GzipFile(fileobj=gz_data_as_file, mode='rb')\n- jls = uncompressed.read().decode('utf-8') \n+ jls = uncompressed.read()\n elif (request.headers['Content-Type'] == 'application/json' or\n request.headers['Content-Type'] == 'application/x-jsonlines'):\n- jls = request.data.decode('utf-8')\n+ jls = request.data\n else:\n return \"Only supported content types are application/x-gzip, application/json and application/x-jsonlines\", status.HTTP_400_BAD_REQUEST\n reader = codecs.getreader('utf-8')"},"message":{"kind":"string","value":"Remove unnecessary utf-8 decode"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1469,"cells":{"diff":{"kind":"string","value":"diff --git a/tests/settings/__init__.py b/tests/settings/__init__.py\nindex .. 100644\n--- a/tests/settings/__init__.py\n+++ b/tests/settings/__init__.py\n@@ -17,3 +17,5 @@ INSTALLED_APPS = [\n ]\n \n TEST_RUNNER = 'tests.runners.MutantTestSuiteRunner'\n+\n+SILENCED_SYSTEM_CHECKS = ['1_7.W001']"},"message":{"kind":"string","value":"Silenced a system check."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1470,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 
100644\n--- a/setup.py\n+++ b/setup.py\n@@ -29,7 +29,7 @@ def fontbakery_scripts():\n \n setup(\n name=\"fontbakery\",\n- version='0.3.1',\n+ version='0.3.2',\n url='https://github.com/googlefonts/fontbakery/',\n description='Font Bakery is a set of command-line tools'\n ' for testing font projects',"},"message":{"kind":"string","value":"version bump in preparation for an imminent pypi release"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1471,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 100644\n--- a/setup.py\n+++ b/setup.py\n@@ -12,7 +12,7 @@ def read(fname):\n \n install_requires = [\n 'Django>=1.4',\n- 'PyScss>=1.2.0,<=1.3.0',\n+ 'PyScss>=1.2.0,<1.3.0',\n ]\n tests_require = [\n 'Pillow',"},"message":{"kind":"string","value":"Fix bad pinning. We don't want to install Really fixes #."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1472,"cells":{"diff":{"kind":"string","value":"diff --git a/dedupe/core.py b/dedupe/core.py\nindex .. 100644\n--- a/dedupe/core.py\n+++ b/dedupe/core.py\n@@ -32,13 +32,14 @@ def randomPairs(n_records, sample_size):\n n = int(n_records * (n_records - 1) / 2)\n \n if sample_size >= n :\n- random_pairs = numpy.arange(n)\n+ random_pairs = numpy.arange(n, dtype='uint')\n else:\n- random_pairs = numpy.array(random.sample(range(n), sample_size))\n+ random_pairs = numpy.array(random.sample(range(n), sample_size),\n+ dtype='uint')\n \n b = 1 - 2 * n_records\n \n- i = numpy.floor((-b - numpy.sqrt(b ** 2 - 8 * random_pairs)) / 2).astype('uint')\n+ i = numpy.floor((-b - 2 * numpy.sqrt(2 * (n - random_pairs) + 0.25)) / 2).astype('uint')\n j = numpy.rint(random_pairs + i * (b + i + 2) / 2 + 1).astype('uint')\n \n return zip(i, j)"},"message":{"kind":"string","value":"try to avoid numeric overflow in randomPairs"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1473,"cells":{"diff":{"kind":"string","value":"diff --git a/rstcheck/__init__.py b/rstcheck/__init__.py\nindex .. 100755\n--- a/rstcheck/__init__.py\n+++ b/rstcheck/__init__.py\n@@ -397,6 +397,8 @@ def _get_directives_and_roles_from_sphinx() -> typing.Tuple[typing.List[str], ty\n sphinx_roles += list(\n sphinx.application.docutils.roles._roles # pylint: disable=protected-access\n )\n+ sphinx_directives.remove(\"code\")\n+ sphinx_directives.remove(\"code-block\")\n \n return (sphinx_directives, sphinx_roles)"},"message":{"kind":"string","value":"remove code and code-block from sphinx ignore list"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1474,"cells":{"diff":{"kind":"string","value":"diff --git a/tests/attrel/check_attrel.py b/tests/attrel/check_attrel.py\nindex .. 
100755\n--- a/tests/attrel/check_attrel.py\n+++ b/tests/attrel/check_attrel.py\n@@ -12,6 +12,7 @@ def check_attrel(attrel_cls, filename, max_discrep_percentage,\n max_errors=0, verbose=False):\n reader = csv.reader(open(filename))\n attrel = attrel_cls()\n+ context_params = set(AttRelContext.__slots__)\n \n linenum = 1\n errors = 0\n@@ -41,7 +42,7 @@ def check_attrel(attrel_cls, filename, max_discrep_percentage,\n damping = float(value)\n elif param == 'component_type':\n component_type = getattr(const.IMC, value)\n- elif hasattr(context, param):\n+ elif param in context_params:\n # value is context object attribute\n if param == 'site_vs30type':\n value = getattr(const.VS30T, value)"},"message":{"kind":"string","value":"tests/attrel/check_attrel: better way to check if csv column has value from calculation context"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1475,"cells":{"diff":{"kind":"string","value":"diff --git a/builder.py b/builder.py\nindex .. 100755\n--- a/builder.py\n+++ b/builder.py\n@@ -1585,6 +1585,11 @@ class Sample(Pmag_object):\n if self.site.er_data['site_' + dtype]:\n value = self.site.er_data['site_' + dtype]\n self.er_data['sample_' + dtype] = value\n+ for dtype in ['_lat', '_lon']:\n+ if 'sample' + dtype in self.er_data.keys():\n+ if not self.er_data['sample' + dtype]:\n+ if 'site' + dtype in self.site.er_data.keys():\n+ self.er_data['sample' + dtype] = self.site.er_data['site' + dtype]"},"message":{"kind":"string","value":"automatically grab site latitudes/longitudes and apply them to samples if the samples don't have latitudes/longitudes of their own"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1476,"cells":{"diff":{"kind":"string","value":"diff --git a/test/testutils.py b/test/testutils.py\nindex .. 100644\n--- a/test/testutils.py\n+++ b/test/testutils.py\n@@ -28,7 +28,7 @@ def gen_random_name():\n \n \n def gen_random_version():\n- return random.choice(string.digits) + '.' + random.choice(string.digits)\n+ return ''.join(random.choice(string.digits)for _ in range(10)) + '.' + ''.join(random.choice(string.digits) for _ in range(10))\n \n \n def assert_raises_valueerror(api, function, **kwargs):"},"message":{"kind":"string","value":"ensure that the chance of versions collision is mitigated"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1477,"cells":{"diff":{"kind":"string","value":"diff --git a/allennlp/commands/main.py b/allennlp/commands/main.py\nindex .. 100644\n--- a/allennlp/commands/main.py\n+++ b/allennlp/commands/main.py\n@@ -5,8 +5,11 @@ import allennlp.commands.serve as serve\n import allennlp.commands.predict as predict\n import allennlp.commands.train as train\n import allennlp.commands.evaluate as evaluate\n+from allennlp.common.checks import ensure_pythonhashseed_set\n \n def main(raw_args: Sequence[str]) -> None:\n+ ensure_pythonhashseed_set()\n+\n parser = argparse.ArgumentParser(description=\"Run AllenNLP\", usage='%(prog)s [command]')\n subparsers = parser.add_subparsers(title='Commands', metavar='')"},"message":{"kind":"string","value":"Add check for PYTHONHASHSEED to run. (#)"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1478,"cells":{"diff":{"kind":"string","value":"diff --git a/rest_framework_nested/__init__.py b/rest_framework_nested/__init__.py\nindex .. 
100644\n--- a/rest_framework_nested/__init__.py\n+++ b/rest_framework_nested/__init__.py\n@@ -12,3 +12,4 @@\n # See the License for the specific language governing permissions and\n # limitations under the License.\n \n+__version__ = '0.12.0'"},"message":{"kind":"string","value":"__init__/__version__ is the new version place"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1479,"cells":{"diff":{"kind":"string","value":"diff --git a/eth_utils/encoding.py b/eth_utils/encoding.py\nindex .. 100644\n--- a/eth_utils/encoding.py\n+++ b/eth_utils/encoding.py\n@@ -1,10 +1,6 @@\n-import math\n-\n-\n def int_to_big_endian(value):\n- byte_length = max(math.ceil(value.bit_length() / 8), 1)\n- return value.to_bytes(byte_length, byteorder='big')\n+ return value.to_bytes((value.bit_length() + 7) // 8 or 1, 'big')\n \n \n def big_endian_to_int(value):\n- return int.from_bytes(value, byteorder='big')\n+ return int.from_bytes(value, 'big')"},"message":{"kind":"string","value":"Faster int_to_big_endian implementation"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1480,"cells":{"diff":{"kind":"string","value":"diff --git a/horizon/loaders.py b/horizon/loaders.py\nindex .. 100644\n--- a/horizon/loaders.py\n+++ b/horizon/loaders.py\n@@ -29,7 +29,7 @@ class TemplateLoader(BaseLoader):\n is_usable = True\n \n def get_template_sources(self, template_name):\n- bits = template_name.split(os.path.sep, 2)\n+ bits = template_name.split('/', 2)\n if len(bits) == 3:\n dash_name, panel_name, remainder = bits\n key = os.path.join(dash_name, panel_name)"},"message":{"kind":"string","value":"Always split template names on forward slash Template names in Django always use forward slash, even on Windows, so to parse them properly, split on forward slash instead of os.path.sep which is '\\\\' on Windows. Change-Id: Ibbfaa1efedcbac5dbe Closes-Bug: #"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1481,"cells":{"diff":{"kind":"string","value":"diff --git a/edc_permissions/permissions_inspector.py b/edc_permissions/permissions_inspector.py\nindex .. 100644\n--- a/edc_permissions/permissions_inspector.py\n+++ b/edc_permissions/permissions_inspector.py\n@@ -169,6 +169,12 @@ class PermissionsInspector:\n return {'unexpected': [x for x in existing if x not in defaults],\n 'missing': [x for x in defaults if x not in existing]}\n \n+ def remove_codenames(self, group_name=None, codenames=None):\n+ group = self.group_model_cls().objects.get(name=group_name)\n+ deleted = group.permissions.filter(\n+ group__name=group_name, codename__in=codenames).delete()\n+ return deleted\n+\n def validate_pii(self):\n \"\"\"Ensure PII codenames not in any other group.\n \"\"\""},"message":{"kind":"string","value":"add method to inspector to delete unused codenames"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1482,"cells":{"diff":{"kind":"string","value":"diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py\nindex .. 
100644\n--- a/salt/modules/zypper.py\n+++ b/salt/modules/zypper.py\n@@ -195,6 +195,7 @@ class Zypper(object):\n \n # Zypper call will stuck here waiting, if another zypper hangs until forever.\n # However, Zypper lock needs to be always respected.\n+ was_blocked = False\n while True:\n log.debug(\"Calling Zypper: \" + ' '.join(self.__cmd))\n self.__call_result = __salt__['cmd.run_all'](self.__cmd, **kwargs)\n@@ -224,7 +225,13 @@ class Zypper(object):\n __salt__['event.fire_master'](data, self.TAG_BLOCKED)\n log.debug(\"Fired a Zypper blocked event to the master with the data: {0}\".format(str(data)))\n time.sleep(5)\n+ if not was_blocked:\n+ was_blocked = True\n \n+ if was_blocked:\n+ __salt__['event.fire_master']({'success': not len(self.error_msg),\n+ 'info': self.error_msg or 'Zypper has been released'},\n+ self.TAG_RELEASED)\n if self.error_msg:\n raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg))"},"message":{"kind":"string","value":"Fire an event about released Zypper with its result"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1483,"cells":{"diff":{"kind":"string","value":"diff --git a/userena/urls.py b/userena/urls.py\nindex .. 100644\n--- a/userena/urls.py\n+++ b/userena/urls.py\n@@ -102,7 +102,7 @@ urlpatterns = patterns('',\n name='userena_profile_edit'),\n \n # View profiles\n- url(r'^(?P(?!signout|signup|signin)[\\@\\.\\w-]+)/$',\n+ url(r'^(?P(?!(signout|signup|signin)/)[\\@\\.\\w-]+)/$',\n userena_views.profile_detail,\n name='userena_profile_detail'),\n url(r'^page/(?P[0-9]+)/$',"},"message":{"kind":"string","value":"urls: update negative lookahead assertion in userena_profile_detail pattern Fixes # by allowing user profile detail urls to contain username **starting** with 'signup', 'signout', or 'signin' strings"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1484,"cells":{"diff":{"kind":"string","value":"diff --git a/napalm/junos/junos.py b/napalm/junos/junos.py\nindex .. 100644\n--- a/napalm/junos/junos.py\n+++ b/napalm/junos/junos.py\n@@ -948,7 +948,6 @@ class JunOSDriver(NetworkDriver):\n 'type': py23_compat.text_type,\n 'apply_groups': list,\n 'remove_private_as': bool,\n- 'cluster': py23_compat.text_type,\n 'multipath': bool,\n 'multihop_ttl': int\n }"},"message":{"kind":"string","value":"remove cluster from the output Conflicts: \tnapalm/junos/junos.py"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1485,"cells":{"diff":{"kind":"string","value":"diff --git a/matplotlib2tikz.py b/matplotlib2tikz.py\nindex .. 100644\n--- a/matplotlib2tikz.py\n+++ b/matplotlib2tikz.py\n@@ -1198,6 +1198,12 @@ def _draw_path(obj, data, path,\n ):\n '''Adds code for drawing an ordinary path in PGFPlots (TikZ).\n '''\n+ if 'draw=white' in draw_options and 'fill opacity=0' in draw_options:\n+ # For some reasons, matplotlib sometimes adds void paths with only\n+ # consist of one point, are white, and have no opacity. To not let\n+ # those clutter the output TeX file, bail out here.\n+ return data, ''\n+\n nodes = []\n prev = None\n for vert, code in path.iter_segments():"},"message":{"kind":"string","value":"don't convert void paths introduced by matplotlib"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1486,"cells":{"diff":{"kind":"string","value":"diff --git a/sandstone/scripts/run_client_tests.py b/sandstone/scripts/run_client_tests.py\nindex .. 
100644\n--- a/sandstone/scripts/run_client_tests.py\n+++ b/sandstone/scripts/run_client_tests.py\n@@ -36,6 +36,7 @@ dep_list = [\n 'ui.router',\n 'sandstone.acemodes',\n 'ui.bootstrap',\n+ 'sandstone.broadcastservice'\n ]\n \n for spec in settings.APP_SPECIFICATIONS:"},"message":{"kind":"string","value":"Added broadcastservice to client test dep list."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1487,"cells":{"diff":{"kind":"string","value":"diff --git a/scripts/experiments/run_ace2.py b/scripts/experiments/run_ace2.py\nindex .. 100755\n--- a/scripts/experiments/run_ace2.py\n+++ b/scripts/experiments/run_ace2.py\n@@ -622,13 +622,20 @@ class SrlExpParamsRunner(ExpParamsRunner):\n inference=\"BP\",\n cacheType=\"NONE\",\n useRelationsForNePairs=False)\n+ # Use best hyperparameters from ace-pm13_014.\n+ defaults.update(adaGradInitialSumSquares=1,\n+ embScalar=1,\n+ adaGradEta=0.1,\n+ l2variance=400000,\n+ sgdAutoSelectLr=False,\n+ sgdNumPasses=20,\n+ )\n defaults.set_incl_name(\"testPredOut\", False)\n \n- # Train on all domains.\n- train = get_annotation_as_train(ace05_all_nobctest) # TODO: This should be all domains\n- dev = ReExpParams(propTrainAsDev=0.05)\n- test = get_annotation_as_test(ace05_bc_test)\n- exp_train = defaults + train + dev + test\n+ # Train on all domains except bc_test. Use bc_test as dev.\n+ train = get_annotation_as_train(ace05_all_nobctest)\n+ dev = get_annotation_as_dev(ace05_bc_test)\n+ exp_train = defaults + train + dev\n exp_train.update(pipeOut=\"pipe.binary.gz\")\n root.add_dependent(exp_train)"},"message":{"kind":"string","value":"Updating hyperparams for ace-agiga2"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1488,"cells":{"diff":{"kind":"string","value":"diff --git a/lib/autokey/scripting/engine.py b/lib/autokey/scripting/engine.py\nindex .. 100644\n--- a/lib/autokey/scripting/engine.py\n+++ b/lib/autokey/scripting/engine.py\n@@ -19,7 +19,7 @@ from collections.abc import Iterable\n \n from typing import Tuple, Optional, List, Union\n \n-from autokey import model, iomediator\n+from autokey import model, iomediator, configmanager\n \n \n class Engine:\n@@ -217,6 +217,12 @@ Folders created within temporary folders must themselves be set temporary\")\n self.check_abbreviation_unique(abbreviations)\n if not replaceExistingHotkey:\n self.check_hotkey_unique(hotkey)\n+ else:\n+ existing_item = self.get_item_with_hotkey(hotkey)\n+ if not isinstance(existing_item, configmanager.configmanager.GlobalHotkey):\n+ existing_item.unset_hotkey()\n+\n+\n \n self.monitor.suspend()\n try:\n@@ -459,6 +465,13 @@ Folders created within temporary folders must themselves be set temporary\")\n if not self.configManager.check_hotkey_unique(modifiers, hotkey[1], None, None)[0]:\n raise ValueError(\"The specified hotkey and modifier combination is already in use: {}\".format(hotkey))\n \n+ def get_item_with_hotkey(self, hotkey):\n+ if not hotkey:\n+ return\n+ modifiers = sorted(hotkey[0])\n+ return self.configManager.get_item_with_hotkey(modifiers, hotkey[1])\n+\n+\n \n def validateAbbreviations(abbreviations):\n if abbreviations is None:"},"message":{"kind":"string","value":"Allow new phrases to override existing hotkey"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1489,"cells":{"diff":{"kind":"string","value":"diff --git a/fandjango/middleware.py b/fandjango/middleware.py\nindex .. 
100644\n--- a/fandjango/middleware.py\n+++ b/fandjango/middleware.py\n@@ -97,6 +97,8 @@ class FacebookMiddleware():\n user.oauth_token.save()\n \n user.save()\n+ finally:\n+ user.oauth_token.extend()\n \n request.facebook.user = user"},"message":{"kind":"string","value":"Extend the OAuth token for new and existing users alike"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1490,"cells":{"diff":{"kind":"string","value":"diff --git a/turbolift/utils.py b/turbolift/utils.py\nindex .. 100644\n--- a/turbolift/utils.py\n+++ b/turbolift/utils.py\n@@ -580,7 +580,7 @@ def restor_perms(local_file, headers):\n \n os.chmod(\n local_file,\n- int(headers['x-object-meta-perms'])\n+ int(headers['x-object-meta-perms'], 8)\n )\n \n # Lookup user and group name and restore them."},"message":{"kind":"string","value":"change int to oct for perms restor"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1491,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 100644\n--- a/setup.py\n+++ b/setup.py\n@@ -35,7 +35,6 @@ setup(\n url='https://github.com/DMSC-Instrument-Data/lewis',\n author='Michael Hart, Michael Wedel, Owen Arnold',\n author_email='Michael Hart , '\n- 'Michael Wedel , '\n 'Owen Arnold ',\n license='GPL v3',\n classifiers=["},"message":{"kind":"string","value":"Removed email as it is no longer valid"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1492,"cells":{"diff":{"kind":"string","value":"diff --git a/panels/_version.py b/panels/_version.py\nindex .. 100644\n--- a/panels/_version.py\n+++ b/panels/_version.py\n@@ -1,2 +1,2 @@\n # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440\n-__version__ = \"0.0.18\"\n+__version__ = \"0.0.19\""},"message":{"kind":"string","value":"Update version number to "},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1493,"cells":{"diff":{"kind":"string","value":"diff --git a/Lib/glyphsLib/classes.py b/Lib/glyphsLib/classes.py\nindex .. 100755\n--- a/Lib/glyphsLib/classes.py\n+++ b/Lib/glyphsLib/classes.py\n@@ -1734,6 +1734,17 @@ class GSComponent(GSBase):\n self.transform = [affine[0], affine[1], affine[3], affine[4], affine[2], affine[5]]\n \n @property\n+ def componentName(self):\n+ return self.name\n+ @componentName.setter\n+ def componentName(self, value):\n+ self.name = value\n+\n+ @property\n+ def component(self):\n+ return self.parent.parent.parent.glyphs[self.name]\n+\n+ @property\n def layer(self):\n return self.parent.parent.parent.glyphs[self.name].layers[self.parent.layerId]"},"message":{"kind":"string","value":"Implemented GSComponent.component and .componentName"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1494,"cells":{"diff":{"kind":"string","value":"diff --git a/dividebatur/senatecount.py b/dividebatur/senatecount.py\nindex .. 
100755\n--- a/dividebatur/senatecount.py\n+++ b/dividebatur/senatecount.py\n@@ -48,6 +48,11 @@ class SenateCountPost2015:\n return Ticket((PreferenceFlow(tuple(prefs)), ))\n \n def btl_flow(form):\n+ if self.s282_candidates:\n+ # s282: only 273(7) to (30) apply, so don't exclude informal BTL votes\n+ min_prefs = 1\n+ else:\n+ min_prefs = 6\n by_pref = {}\n for pref, candidate_id in zip(form, self.flows.btl):\n if pref is None:\n@@ -66,7 +71,7 @@ class SenateCountPost2015:\n continue\n prefs.append((len(prefs) + 1, candidate_id))\n # must have unique prefs for 1..6, or informal\n- if len(prefs) < 6:\n+ if len(prefs) < min_prefs:\n return None\n return Ticket((PreferenceFlow(tuple(prefs)), ))"},"message":{"kind":"string","value":"fix s implementation talking with deanashley on twitter, it seems BTL ballots shouldn't be excluded if informal (no clear 1-6) after the renumbering is applied. s actually skips the exclusion of informal ballots step. (thanks Dean!)"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1495,"cells":{"diff":{"kind":"string","value":"diff --git a/marathon/models/app.py b/marathon/models/app.py\nindex .. 100644\n--- a/marathon/models/app.py\n+++ b/marathon/models/app.py\n@@ -94,7 +94,7 @@ class MarathonApp(MarathonResource):\n tasks_healthy=None, task_kill_grace_period_seconds=None, tasks_unhealthy=None, upgrade_strategy=None,\n unreachable_strategy=None, uris=None, user=None, version=None, version_info=None,\n ip_address=None, fetch=None, task_stats=None, readiness_checks=None,\n- readiness_check_results=None, secrets=None, port_definitions=None, residency=None, gpus=None):\n+ readiness_check_results=None, secrets=None, port_definitions=None, residency=None, gpus=None, networks=None):\n \n # self.args = args or []\n self.accepted_resource_roles = accepted_resource_roles\n@@ -184,6 +184,8 @@ class MarathonApp(MarathonResource):\n else MarathonAppVersionInfo.from_json(version_info)\n self.task_stats = task_stats if (isinstance(task_stats, MarathonTaskStats) or task_stats is None) \\\n else MarathonTaskStats.from_json(task_stats)\n+ self.networks = networks\n+ \n \n def add_env(self, key, value):\n self.env[key] = value"},"message":{"kind":"string","value":"Adding the key \"networks\" in the JSON received of marathon"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1496,"cells":{"diff":{"kind":"string","value":"diff --git a/artist/multi_plot.py b/artist/multi_plot.py\nindex .. 100644\n--- a/artist/multi_plot.py\n+++ b/artist/multi_plot.py\n@@ -31,7 +31,7 @@ class MultiPlot:\n self.subplots = []\n for i in range(rows):\n for j in range(columns):\n- self.subplots.append(SubPlot(i, j))\n+ self.subplots.append(SubPlotContainer(i, j))\n \n def set_empty(self, row, column):\n subplot = self.get_subplot_at(row, column)\n@@ -253,7 +253,7 @@ class MultiPlot:\n return 'normal', 'normal'\n \n \n-class SubPlot:\n+class SubPlotContainer:\n def __init__(self, row, column):\n self.row = row\n self.column = column"},"message":{"kind":"string","value":"Renamed SubPlot -> SubPlotContainer"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1497,"cells":{"diff":{"kind":"string","value":"diff --git a/contentfiles/storage.py b/contentfiles/storage.py\nindex .. 
100644\n--- a/contentfiles/storage.py\n+++ b/contentfiles/storage.py\n@@ -44,4 +44,5 @@ class PrivateStorage(ContentFilesMixin, LibCloudPrivateStorage):\n def url(self, name):\n protocol = 'https' if CONTENTFILES_SSL else 'http'\n return '%s://%s/%s/%s/%s' % (\n- protocol, self.driver.connection.host, self.bucket, self.path_name, name)\n+ protocol, self.driver.connection.host, self.bucket, self.path_name,\n+ urllib.parse.quote(name))"},"message":{"kind":"string","value":"Didn't quote the private storage file name"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1498,"cells":{"diff":{"kind":"string","value":"diff --git a/cassandra/cluster.py b/cassandra/cluster.py\nindex .. 100644\n--- a/cassandra/cluster.py\n+++ b/cassandra/cluster.py\n@@ -317,7 +317,7 @@ class Cluster(object):\n \n if not self._is_setup:\n self.load_balancing_policy.populate(\n- weakref.proxy(self), self.metadata.getAllHosts())\n+ weakref.proxy(self), self.metadata.all_hosts())\n self._is_setup = True\n \n if self.control_connection:"},"message":{"kind":"string","value":"Fix bad load balancing policy population"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1499,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 100644\n--- a/setup.py\n+++ b/setup.py\n@@ -1,4 +1,5 @@\n #!/usr/bin/env python\n+# -*- coding: utf-8 -*-\n \n '''\n Execution:"},"message":{"kind":"string","value":"should i add unicode support to setup.py? sure why not? what could possibly go wrong?"},"diff_languages":{"kind":"string","value":"py"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":14,"numItemsPerPage":100,"numTotalItems":278877,"offset":1400,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjcxNjIyMCwic3ViIjoiL2RhdGFzZXRzL2hrczM1MGQvZ2l0LWRpZmYtdG8tY29tbWl0LWdlbW1hLTMtMjcwbSIsImV4cCI6MTc1NjcxOTgyMCwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.2smZmXfsMoy4LrlgsRvcMBAHqpwImGdnuDLv0QUzzlvB_npb_90YbJTQYiaq8uU-Vhon1koCQLevGxwidcZBBw","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
Columns: diff (string, lengths 139 to 3.65k), message (string, lengths 8 to 627), diff_languages (string, 1 distinct value: py)
diff --git a/tests/test_collections.py b/tests/test_collections.py index <HASH>..<HASH> 100644 --- a/tests/test_collections.py +++ b/tests/test_collections.py @@ -91,7 +91,7 @@ class TestCollection: with pytest.raises(Exception): api.Event.create(b, '2cd64f22-2222-44f5-bc45-53440af38cec', ( 'BEGIN:VCALENDAR\r\nVERSION:2.0\r\nPRODID:+//Yo\r\nBEGIN:VEVENT\r\nDTSTAMP:20170324T164' + - '747Z\r\nUID:2cd64f22-b51c-44f5-bc45-53440af38cec\r\nDTSTART;VALUE\u003dDATE:20170324' + + '747Z\r\nUID:2cd64f22-2222-44f5-bc45-53440af38cec\r\nDTSTART;VALUE\u003dDATE:20170324' + '\r\nDTEND;VALUE\u003dDATE:20170325\r\nSUMMARY:Feed cat\r\nSTATUS:CONFIRMED\r\nTRANSP:' + 'TRANSPARENT\r\nEND:VEVENT\r\nEND:VCALENDAR\r\n'))
Test: Fix uid inconsistency.
py
diff --git a/src/toil/__init__.py b/src/toil/__init__.py index <HASH>..<HASH> 100644 --- a/src/toil/__init__.py +++ b/src/toil/__init__.py @@ -27,6 +27,23 @@ def toilPackageDirPath(): assert result.endswith('/toil') return result -def resolveEntryPoint(entryPoint): - return os.path.join(os.path.dirname(sys.executable),entryPoint) +def resolveEntryPoint(entryPoint): + """ + Returns the path to the given entry point (see setup.py) that *should* work on a worker. The + return value may be an absolute or a relative path. + """ + if hasattr(sys, 'real_prefix'): + # Inside a virtualenv we will use absolute paths to the entrypoints. For clusters this + # means that if Toil is installed in a virtualenv on the leader, it must be installed in + # a virtualenv located at the same path on the worker. + path = os.path.join(os.path.dirname(sys.executable), entryPoint) + assert os.path.isfile(path) + assert os.access(path, os.X_OK) + return path + else: + # Outside a virtualenv it is hard to predict where the entry points got installed. It is + # the reponsibility of the user to ensure that they are present on PATH and point to the + # correct version of Toil. This is still better than an absolute path because it gives + # the user control over Toil's location on both leader and workers. + return entryPoint
Don't use absolute paths for entry points outside of virtualenvs (resolves #<I>)
py
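A minimal standalone sketch of the virtualenv detection the commit above keys on; the helper name is ours, and the PEP 405 venv check is an extra beyond what the row's diff uses:

    import sys

    def in_virtualenv():
        # Classic virtualenv sets sys.real_prefix on the interpreter;
        # PEP 405 venvs instead make sys.prefix differ from sys.base_prefix.
        return hasattr(sys, "real_prefix") or sys.base_prefix != sys.prefix

    print(in_virtualenv())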
diff --git a/distribution/bin/tag-missing-milestones.py b/distribution/bin/tag-missing-milestones.py index <HASH>..<HASH> 100755 --- a/distribution/bin/tag-missing-milestones.py +++ b/distribution/bin/tag-missing-milestones.py @@ -27,6 +27,7 @@ if len(sys.argv) != 5: sys.stderr.write(" It is also necessary to set a GIT_TOKEN environment variable containing a personal access token.\n") sys.exit(1) +expected_apache_html_url_prefix = "https://github.com/apache/incubator-druid/pull/" github_username = sys.argv[1] previous_release_commit = sys.argv[2] @@ -47,6 +48,9 @@ for sha in all_commits.splitlines(): print("Retrieved {} pull requests associated to commit {}".format(len(pull_requests), sha)) for pr in pull_requests: pr_number = pr['number'] + if expected_apache_html_url_prefix not in pr['html_url']: + print("Skipping Pull Request {} associatd with commit {} since the PR is not from the Apache repo.".format(pr_number, sha)) + continue if pr['milestone'] is None: print("Tagging Pull Request {} with milestone {}".format(pr_number, milestone)) url = "https://api.github.com/repos/apache/incubator-druid/issues/{}".format(pr_number)
Skip non-Apache repo PRs in milestone tagging script (#<I>)
py
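A hedged sketch of the filtering rule the commit above adds; the pr dict shape mirrors the GitHub API fields the script reads, and the helper name is hypothetical:

    EXPECTED_PREFIX = "https://github.com/apache/incubator-druid/pull/"

    def needs_milestone(pr):
        # Only tag PRs that live in the Apache repo and lack a milestone.
        return EXPECTED_PREFIX in pr["html_url"] and pr["milestone"] is None

    print(needs_milestone({"html_url": EXPECTED_PREFIX + "123", "milestone": None}))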
diff --git a/esda/shape.py b/esda/shape.py index <HASH>..<HASH> 100644 --- a/esda/shape.py +++ b/esda/shape.py @@ -124,7 +124,7 @@ def isoperimetric_quotient(collection): pp = (a_d) / (a_c) = (a_d) / ((p_d / (2*\pi))^2 * \pi) = (a_d) / (p_d**2 / (4\PI)) """ - ga = _cast(ga) + ga = _cast(collection) return (4 * numpy.pi * pygeos.area(ga)) / (pygeos.measurement.length(ga) ** 2)
fix the ga/collection in ipq
py
diff --git a/teneto/classes/bids.py b/teneto/classes/bids.py index <HASH>..<HASH> 100644 --- a/teneto/classes/bids.py +++ b/teneto/classes/bids.py @@ -202,7 +202,7 @@ class TenetoBIDS: if 'weight-var' in params.keys(): if params['weight-var'] == 'from-subject-fc': fc_dir = base_dir + '/fc/' - fc = os.listdir(fc_dir) + fc = os.listdir(fc_dir) i = 0 for ff in fc: if ff.split('_fc.npy')[0] in f: @@ -273,10 +273,8 @@ class TenetoBIDS: report += '<img src=' + os.path.abspath(confound_report_figdir) + '/' + c + '.png><br><br>' report += '</body></html>' - with open(confound_report_dir + save_name + '_confoundcorr.html', 'w') as file: - file.write(report) - - file.close() + with open(confound_report_dir + save_name + '_confoundcorr.html', 'w') as file: + file.write(report) def make_functional_connectivity(self,njobs=None,returngroup=False,file_hdr=None,file_idx=None):
added possibility to suppress report (fixed bug)
py
diff --git a/udata/search/__init__.py b/udata/search/__init__.py index <HASH>..<HASH> 100644 --- a/udata/search/__init__.py +++ b/udata/search/__init__.py @@ -185,7 +185,7 @@ def suggest(q, field, size=10): result = s.execute_suggest() try: return result.suggestions[0]['options'] - except IndexError: + except (IndexError, AttributeError): return []
Do not fail when no suggestion is available
py
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py index <HASH>..<HASH> 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate_commands/metadata.py @@ -174,6 +174,11 @@ VALID_UNIT_NAMES = { 'exacore', 'build', 'prediction', + 'watt', + 'kilowatt', + 'megawatt', + 'gigawatt', + 'terawatt', 'heap', 'volume', }
add watt unit (#<I>)
py
diff --git a/lettuce/core.py b/lettuce/core.py index <HASH>..<HASH> 100644 --- a/lettuce/core.py +++ b/lettuce/core.py @@ -298,7 +298,8 @@ class Step(object): def elsewhere(step): # actual step behavior, maybe. - This will raise error (thus halting execution of the step) if a subordinate step fails. + This will raise the error of the first failing step (thus halting + execution of the step) if a subordinate step fails. """ lines = string.split('\n') @@ -312,7 +313,7 @@ class Step(object): else: self.passed = False self.failed = True - assert not steps_failed, "Subordinate steps failed for this step." + assert not steps_failed, steps_failed[0].why.exception def run(self, ignore_case): """Runs a step, trying to resolve it on available step
When a subordinate step fails, raise the exception of that step rather than a generic 'Subordinate steps failed for this step.' message.
py
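A self-contained sketch of the pattern the commit above switches to: collect subordinate failures but surface the first real exception rather than a generic message (all names here are ours):

    def run_subordinates(steps):
        failed = []
        for step in steps:
            try:
                step()
            except Exception as exc:  # keep running, remember the failure
                failed.append(exc)
        # Raise the first failing step's own exception, not a generic one.
        assert not failed, failed[0]

    run_subordinates([lambda: None])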
diff --git a/master/buildbot/test/unit/test_process_buildstep.py b/master/buildbot/test/unit/test_process_buildstep.py index <HASH>..<HASH> 100644 --- a/master/buildbot/test/unit/test_process_buildstep.py +++ b/master/buildbot/test/unit/test_process_buildstep.py @@ -219,11 +219,15 @@ class TestBuildStep(steps.BuildStepMixin, config.ConfigErrorsMixin, unittest.Tes d.addCallback(lambda _: self.assertTrue(called[0])) return d + @defer.inlineCallbacks def test_hideStepIf_fails(self): # 0/0 causes DivideByZeroError, which should be flagged as an exception + self._setupWaterfallTest( - lambda: 0 / 0, False, expectedResult=EXCEPTION) - return self.runStep() + lambda x, y: 0 / 0, False, expectedResult=EXCEPTION) + self.step.addLogWithFailure = mock.Mock() + yield self.runStep() + self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1) @compat.usesFlushLoggedErrors def test_hideStepIf_Callable_Exception(self):
fix hideStepIf unit tests: actually test that an exception in hideStepIf will be caught in error logs
py
diff --git a/GPy/testing/examples_tests.py b/GPy/testing/examples_tests.py index <HASH>..<HASH> 100644 --- a/GPy/testing/examples_tests.py +++ b/GPy/testing/examples_tests.py @@ -29,11 +29,11 @@ def checkgrads_generator(model): """ def model_checkgrads(model): - assert model.checkgrad() is True + assert model.checkgrad() def model_instance(model): - assert model.checkgrad() is True + assert isinstance(model, GPy.core.model) def test_models(): @@ -45,7 +45,7 @@ def test_models(): print "MODULE", module_examples print "Before" print inspect.getmembers(module_examples, predicate=inspect.isfunction) - functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ] + functions = [ func for func in inspect.getmembers(module_examples, predicate=inspect.isfunction) if func[0].startswith('_') is False ][::-1] print "After" print functions for example in functions: @@ -72,3 +72,4 @@ def test_models(): if __name__ == "__main__": print "Running unit tests, please be (very) patient..." + unittest.main()
Should now test all (although upon error it stops trying to generate any more)
py
diff --git a/mdf_toolbox/globus_search/search_helper.py b/mdf_toolbox/globus_search/search_helper.py index <HASH>..<HASH> 100644 --- a/mdf_toolbox/globus_search/search_helper.py +++ b/mdf_toolbox/globus_search/search_helper.py @@ -424,7 +424,7 @@ class SearchHelper(): dict: The full mapping for the index. """ return (self.__search_client.get( - "/unstable/index/{}/mapping".format(mdf_toolbox.translate_index(self.index))) + "/beta/index/{}/mapping".format(mdf_toolbox.translate_index(self.index))) ["mappings"]) # ************************************************************************************
Update for Search change - unstable features now beta
py
diff --git a/tests/test_features.py b/tests/test_features.py index <HASH>..<HASH> 100644 --- a/tests/test_features.py +++ b/tests/test_features.py @@ -814,7 +814,7 @@ def test_fourier_tempogram_invert(sr, hop_length, win_length, center, window): odf_inv = librosa.istft(tempogram, hop_length=1, center=center, window=window, length=len(odf)) - assert np.allclose(odf[sl], odf_inv[sl]) + assert np.allclose(odf_inv[sl], odf[sl]) def test_cens():
trying to fix fourier tempogram inversion test
py
diff --git a/tests/test_symmetric.py b/tests/test_symmetric.py index <HASH>..<HASH> 100644 --- a/tests/test_symmetric.py +++ b/tests/test_symmetric.py @@ -70,6 +70,17 @@ class SymmetricTests(unittest.TestCase): plaintext = symmetric.rc2_cbc_pkcs5_decrypt(key, ciphertext, iv) self.assertEqual(data, plaintext) + def test_rc2_40_encrypt_decrypt(self): + key = util.rand_bytes(5) + data = b'This is data to encrypt' + + iv, ciphertext = symmetric.rc2_cbc_pkcs5_encrypt(key, data, None) + self.assertNotEqual(data, ciphertext) + self.assertEqual(byte_cls, type(ciphertext)) + + plaintext = symmetric.rc2_cbc_pkcs5_decrypt(key, ciphertext, iv) + self.assertEqual(data, plaintext) + def test_des_encrypt_decrypt(self): key = util.rand_bytes(8) data = b'This is data to encrypt'
Add a test for RC2 with a <I> bit key
py
diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/client.py +++ b/pyrogram/client/client.py @@ -1677,7 +1677,7 @@ class Client(Methods, BaseClient): file_part += 1 if progress: - progress(self, min(file_part * part_size, file_size), file_size, *progress_args) + progress(min(file_part * part_size, file_size), file_size, *progress_args) except Client.StopTransmission: raise except Exception as e: @@ -1808,7 +1808,6 @@ class Client(Methods, BaseClient): if progress: progress( - self, min(offset, file_size) if file_size != 0 else offset, @@ -1891,7 +1890,6 @@ class Client(Methods, BaseClient): if progress: progress( - self, min(offset, file_size) if file_size != 0 else offset,
Don't pass the client to progress callbacks anymore
py
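After the commit above, a progress callback receives only the transferred and total byte counts plus any user-supplied arguments; a hypothetical callback matching that shape:

    def progress(current, total, *args):
        # current/total are byte counts; *args are caller-supplied extras.
        print("%d/%d bytes %s" % (current, total, args))

    progress(512, 1024, "upload.bin")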
diff --git a/niworkflows/interfaces/tests/test_bids.py b/niworkflows/interfaces/tests/test_bids.py index <HASH>..<HASH> 100644 --- a/niworkflows/interfaces/tests/test_bids.py +++ b/niworkflows/interfaces/tests/test_bids.py @@ -519,7 +519,7 @@ def test_DerivativesDataSink_data_dtype_source( def make_empty_nii_with_dtype(fname, dtype): Path(fname).parent.mkdir(exist_ok=True, parents=True) - size = (30, 30, 30, 10) + size = (2, 3, 4, 5) nb.Nifti1Image(np.zeros(size, dtype=dtype), np.eye(4)).to_filename(fname)
Simplify test_DerivativesDataSink_data_dtype_source
py
diff --git a/masonite/drivers/SessionCookieDriver.py b/masonite/drivers/SessionCookieDriver.py index <HASH>..<HASH> 100644 --- a/masonite/drivers/SessionCookieDriver.py +++ b/masonite/drivers/SessionCookieDriver.py @@ -85,7 +85,7 @@ class SessionCookieDriver(SessionContract, BaseDriver): bool -- If the key was deleted or not """ - data = self.__collect_data() + self.__collect_data() if self.request.get_cookie('s_{}'.format(key)): self.request.delete_cookie('s_{}'.format(key))
fix F<I>: local variable 'data' is assigned to but never used
py
diff --git a/tests/test_signature_parser.py b/tests/test_signature_parser.py index <HASH>..<HASH> 100644 --- a/tests/test_signature_parser.py +++ b/tests/test_signature_parser.py @@ -79,9 +79,9 @@ class StrategyGenerator(Parser): """ if len(toks) == 5 and toks[1] == '{' and toks[4] == '}': - return strategies.dictionaries(keys=toks[2], values=toks[3]) + return strategies.dictionaries(keys=toks[2], values=toks[3], max_size=20) elif len(toks) == 2: - return strategies.lists(elements=toks[1]) + return strategies.lists(elements=toks[1], max_size=20) else: # pragma: no cover raise ValueError("unexpected tokens")
Shorten lists in generated values in strategy.
py
diff --git a/lib/svtplay_dl/utils/__init__.py b/lib/svtplay_dl/utils/__init__.py index <HASH>..<HASH> 100644 --- a/lib/svtplay_dl/utils/__init__.py +++ b/lib/svtplay_dl/utils/__init__.py @@ -117,7 +117,9 @@ def select_quality(options, streams): # Extract protocol prio, in the form of "hls,hds,http,rtmp", # we want it as a list - proto_prio = (options.stream_prio or '').split() or None + proto_prio = None + if options.stream_prio: + proto_prio = options.stream_prio.split(',') return [x for x in prio_streams(streams, protocol_prio=proto_prio)
select_quality: fix argument parsing. Instead of parsing the argument to --stream-prio as a comma separated list, it was accidentally handled as a space separated list.
py
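The bug in the row above is easy to reproduce: str.split() with no argument splits on whitespace, so a comma separated value stays a single element:

    arg = "hls,hds,http,rtmp"
    print(arg.split())     # ['hls,hds,http,rtmp'] -- one element, the bug
    print(arg.split(","))  # ['hls', 'hds', 'http', 'rtmp'] -- the fix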
diff --git a/python/jsbeautifier/unpackers/packer.py b/python/jsbeautifier/unpackers/packer.py index <HASH>..<HASH> 100644 --- a/python/jsbeautifier/unpackers/packer.py +++ b/python/jsbeautifier/unpackers/packer.py @@ -80,6 +80,7 @@ class Unbaser(object): """Functor for a given base. Will efficiently convert strings to natural numbers.""" ALPHABET = { + 53 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ', 59 : '0123456789abcdefghijklmnopqrstuvwABCDEFGHIJKLMNOPQRSTUVWXYZ', 62 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ', 95 : (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
More radix stuff. Not much idea where all the radixes come from (dean.edwards.name doesn't seem to make them?); probably will need to extract radix params from the actual code.
py
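A generic sketch of what such an alphabet table supports: converting a packed string back to a natural number in the given base. This is just the core idea, not the project's actual Unbaser:

    def unbase(value, alphabet):
        base = len(alphabet)
        number = 0
        for ch in value:
            # Horner's rule: shift left one digit, then add this digit.
            number = number * base + alphabet.index(ch)
        return number

    print(unbase("10", "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQ"))  # 53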
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -17,6 +17,5 @@ setup( # Any requirements here, e.g. "Django >= 1.1.1" install_requires=[ 'django', - 'django-adminlte2' ], )
Removing errant requirements in setup.py
py
diff --git a/riak/test_server.py b/riak/test_server.py index <HASH>..<HASH> 100644 --- a/riak/test_server.py +++ b/riak/test_server.py @@ -125,6 +125,7 @@ class TestServer(object): def prepare(self): if not self._prepared: + self.touch_ssl_distribution_args() self.create_temp_directories() self._riak_script = os.path.join(self._temp_bin, "riak") self.write_riak_script() @@ -243,6 +244,14 @@ class TestServer(object): app_config.write(erlang_config(self.app_config)) app_config.write(".") + def touch_ssl_distribution_args(self): + # To make sure that the ssl_distribution.args file is present, + # the control script in the source node has to have been run at + # least once. Running the `chkconfig` command is innocuous + # enough to accomplish this without other side-effects. + script = os.path.join(self.bin_dir, "riak") + Popen([script, "chkconfig"]).wait() + def _kv_backend(self): return self.app_config["riak_kv"]["storage_backend"]
Touch the ssl_distribution.args_file by running `riak chkconfig`. Closes #<I>.
py
diff --git a/harvesters/core.py b/harvesters/core.py index <HASH>..<HASH> 100755 --- a/harvesters/core.py +++ b/harvesters/core.py @@ -327,6 +327,7 @@ class Component2D(ComponentBase): # if self._part: count = self._part.data_size + count //= component_per_bytes data_offset = self._part.data_offset else: count = self.width * self.height @@ -336,7 +337,7 @@ class Component2D(ComponentBase): # Convert the Python's built-in bytes array to a Numpy array. self._data = np.frombuffer( self._buffer.raw_buffer, - count=count // component_per_bytes, + count=count, dtype=dtype, offset=data_offset )
Resolve issue #<I>
py
diff --git a/command/build_ext.py b/command/build_ext.py index <HASH>..<HASH> 100644 --- a/command/build_ext.py +++ b/command/build_ext.py @@ -146,8 +146,7 @@ class build_ext (Command): # Setup the CCompiler object that we'll use to do all the # compiling and linking - self.compiler = new_compiler (plat=os.environ.get ('PLAT'), - verbose=self.verbose, + self.compiler = new_compiler (verbose=self.verbose, dry_run=self.dry_run, force=self.force) if self.include_dirs is not None:
Took out what looks like old debugging code that probably should never have been checked in: was passing the PLAT environment variable as the 'plat' argument to 'new_compiler()'.
py
diff --git a/jose/jwt.py b/jose/jwt.py index <HASH>..<HASH> 100644 --- a/jose/jwt.py +++ b/jose/jwt.py @@ -10,6 +10,7 @@ from six import string_types from jose import jws +from .exceptions import JWSError from .exceptions import JWTClaimsError from .exceptions import JWTError from .exceptions import ExpiredSignatureError @@ -112,12 +113,14 @@ def decode(token, key, algorithms=None, options=None, audience=None, issuer=None defaults.update(options) verify_signature = defaults.get('verify_signature', True) - payload = jws.verify(token, key, algorithms, verify=verify_signature) + + try: + payload = jws.verify(token, key, algorithms, verify=verify_signature) + except JWSError as e: + raise JWTError(e) try: claims = json.loads(payload.decode('utf-8')) - except (TypeError, binascii.Error): - raise JWTError('Invalid payload padding') except ValueError as e: raise JWTError('Invalid payload string: %s' % e)
Catch `JWSError`s in `jwt.decode()` So far exceptions raised in `jws.verify()` weren't caught in the above function, which led to it raising (undocumented) exceptions from the underlying module. This commit transforms said exceptions. This includes cases of invalid payload padding, error handling for which had previously been attached to the `json.loads()` call.
py
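A minimal sketch of the exception translation the commit above introduces, so callers only ever have to catch the documented type (stand-in names, not jose's real internals):

    class JWSError(Exception):
        pass

    class JWTError(Exception):
        pass

    def jws_verify(token):
        # Stand-in for the lower signing layer; always fails for the demo.
        raise JWSError("signature verification failed")

    def jwt_decode(token):
        try:
            return jws_verify(token)
        except JWSError as exc:
            # Translate the lower-layer error into the documented one.
            raise JWTError(exc)

    try:
        jwt_decode("not.a.token")
    except JWTError as exc:
        print("caught:", exc)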
diff --git a/Lib/extractor/formats/ttx.py b/Lib/extractor/formats/ttx.py index <HASH>..<HASH> 100644 --- a/Lib/extractor/formats/ttx.py +++ b/Lib/extractor/formats/ttx.py @@ -1,12 +1,12 @@ from extractor.formats.opentype import extractOpenTypeInfo, extractOpenTypeGlyphs, extractOpenTypeKerning def isTTX(pathOrFile): - from fontTools.ttLib import TTFont, TTLibError + from fontTools.ttLib import TTFont try: font = TTFont() font.importXML(pathOrFile) del font - except TTLibError: + except Exception: return False return True
importXML can raise all sorts of exceptions apart from TTLibError
py
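The broad except in the row above is the usual format-sniffing idiom: any parse failure simply means "not this format". A sketch of the same idea with the standard library's XML parser:

    import xml.etree.ElementTree as ET

    def looks_like_xml(path):
        try:
            ET.parse(path)
        except Exception:  # parse errors, IO errors, encoding errors, ...
            return False
        return True

    print(looks_like_xml("definitely-not-here.ttx"))  # False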
diff --git a/mautrix/bridge/user.py b/mautrix/bridge/user.py index <HASH>..<HASH> 100644 --- a/mautrix/bridge/user.py +++ b/mautrix/bridge/user.py @@ -84,11 +84,14 @@ class BaseUser(ABC): current_dms = {} if replace: # Filter away all existing DM statuses with bridge users - current_dms = {user: rooms for user, rooms in current_dms.items() - if not self.bridge.is_bridge_ghost(user)} + filtered_dms = {user: rooms for user, rooms in current_dms.items() + if not self.bridge.is_bridge_ghost(user)} + else: + filtered_dms = current_dms # Add DM statuses for all rooms in our database - current_dms.update(dms) - await puppet.intent.set_account_data(EventType.DIRECT, current_dms) + new_dms = {**filtered_dms, **dms} + if current_dms != new_dms: + await puppet.intent.set_account_data(EventType.DIRECT, new_dms) def _track_metric(self, metric: Gauge, value: bool) -> None: if self._metric_value[metric] != value:
Only send new m.direct account data if something changed
py
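The read-merge-compare-write pattern from the commit above, reduced to plain dicts; the store here is a stand-in for the account-data endpoint:

    def merge_account_data(store, key, updates):
        current = store.get(key, {})
        merged = {**current, **updates}
        if merged != current:
            # Only write back when something actually changed.
            store[key] = merged
        return merged

    store = {}
    merge_account_data(store, "m.direct", {"@ghost:example.org": ["!room:example.org"]})
    print(store)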
diff --git a/py3status/modules/imap.py b/py3status/modules/imap.py index <HASH>..<HASH> 100644 --- a/py3status/modules/imap.py +++ b/py3status/modules/imap.py @@ -189,6 +189,9 @@ class Py3status: socket.write(b'DONE\r\n') # important! response = socket.read(4096).decode(encoding='ascii') expected_response = (command_tag + b' OK Idle completed').decode(encoding='ascii') + if response.lower().startswith('* '.lower()): # '* OK Still here', mostly + # sometimes, more messages come in between reading and DONEing; so read them again + response = socket.read(4096).decode(encoding='ascii') if not response.lower().startswith(expected_response.lower()): raise imaplib.IMAP4.abort("While terminating IDLE: " + response)
catch 'OK still there' (_now_ I'm done). When a change comes in, we send 'DONE', but sometimes more messages or the heartbeat come through right between those statements. Then we'll read again so as not to fill up the logs with unnecessary 'recoverable errors'.
py
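A hedged sketch of the race handling: after DONE is sent, one unsolicited "* ..." line may arrive before the tagged reply, so read once more. The sock object with read/write is hypothetical:

    def finish_idle(sock, tag):
        sock.write(b"DONE\r\n")
        reply = sock.read(4096).decode("ascii")
        if reply.startswith("* "):
            # Untagged data slipped in between the change and our DONE;
            # read again to get the tagged completion.
            reply = sock.read(4096).decode("ascii")
        if not reply.lower().startswith((tag + " OK").lower()):
            raise RuntimeError("unexpected IDLE termination: " + reply)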
diff --git a/photutils/aperture_core.py b/photutils/aperture_core.py index <HASH>..<HASH> 100644 --- a/photutils/aperture_core.py +++ b/photutils/aperture_core.py @@ -25,6 +25,7 @@ from .utils.wcs_helpers import (skycoord_to_pixel_scale_angle, assert_angle, from astropy import __version__ as astropy_version if version.LooseVersion(astropy_version) > version.LooseVersion('1.0'): from astropy.wcs.utils import skycoord_to_pixel + from astropy.nddata import support_nddata skycoord_to_pixel_mode = 'all' else: from .extern.wcs_utils import skycoord_to_pixel @@ -1147,6 +1148,7 @@ class RectangularAnnulus(PixelAperture): return flux +@support_nddata def aperture_photometry(data, apertures, unit=None, wcs=None, error=None, effective_gain=None, mask=None, method='exact', subpixels=5, pixelwise_error=True):
decorating aperture_photometry() with support_nddata
py
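Not astropy's actual decorator, but a sketch of the general shape such a decorator takes: unpack a container argument's attributes into the keywords the wrapped function expects:

    from functools import wraps

    def support_container(func):
        @wraps(func)
        def wrapper(data, *args, **kwargs):
            if hasattr(data, "data"):
                # Pull companion attributes off the container, then unwrap it.
                kwargs.setdefault("mask", getattr(data, "mask", None))
                data = data.data
            return func(data, *args, **kwargs)
        return wrapper

    @support_container
    def total(data, mask=None):
        return sum(d for d, m in zip(data, mask or [False] * len(data)) if not m)

    print(total([1, 2, 3]))  # 6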
diff --git a/msvc9compiler.py b/msvc9compiler.py index <HASH>..<HASH> 100644 --- a/msvc9compiler.py +++ b/msvc9compiler.py @@ -292,7 +292,6 @@ def query_vcvarsall(version, arch="x86"): result[key] = removeDuplicates(value) finally: - popen.stdin.close() popen.stdout.close() popen.stderr.close()
Merged revisions <I> via svnmerge from svn+ssh://<EMAIL>/python/branches/py3k ........ r<I> | eric.araujo | <I>-<I>-<I> <I>:<I>:<I> <I> (Fri., <I> Nov. <I>) | 2 lines And now for something completely different: Finish fixing #<I> again. ........
py
diff --git a/openquake/hazardlib/shakemap.py b/openquake/hazardlib/shakemap.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/shakemap.py +++ b/openquake/hazardlib/shakemap.py @@ -195,11 +195,9 @@ def amplify_gmfs(imts, vs30s, gmfs): Amplify the ground shaking depending on the vs30s """ n = len(vs30s) - for i, im in enumerate(imts): - for iloc in range(n): - gmfs[i * n + iloc] = amplify_ground_shaking( - im.period, vs30s[iloc], gmfs[i * n + iloc]) - return gmfs + out = [amplify_ground_shaking(im.period, vs30s[i], gmfs[m * n + i]) + for m, im in enumerate(imts) for i in range(n)] + return numpy.array(out) def amplify_ground_shaking(T, vs30, gmvs):
Minor refactoring
py
diff --git a/sos/plugins/ipmitool.py b/sos/plugins/ipmitool.py index <HASH>..<HASH> 100644 --- a/sos/plugins/ipmitool.py +++ b/sos/plugins/ipmitool.py @@ -26,14 +26,22 @@ class IpmiTool(Plugin, RedHatPlugin, DebianPlugin): packages = ('ipmitool',) def setup(self): + result = self.get_command_output("ipmitool -I usb mc info") + have_usbintf = result['status'] + + if not have_usbintf: + cmd = "ipmitool -I usb" + else: + cmd = "ipmitool" + self.add_cmd_output([ - "ipmitool sel info", - "ipmitool sel list", - "ipmitool sensor list", - "ipmitool chassis status", - "ipmitool fru print", - "ipmitool mc info", - "ipmitool sdr info" + "%s sel info" % cmd, + "%s sel list" % cmd, + "%s sensor list" % cmd, + "%s chassis status" % cmd, + "%s fru print" % cmd, + "%s mc info" % cmd, + "%s sdr info" % cmd ]) # vim: set et ts=4 sw=4 :
[ipmitool] use usb interface if available. sosreport generally uses the default interface (/dev/ipmi0) while executing the ipmitool command, which is quite slow compared to the usb interface. The usb interface uses the virtual device exposed by the BMC; on OpenPower systems, for example, the AMI exposes a virtual USB interface. An ipmitool command using the usb interface sends the data to the kernel drivers, which in turn send it to the BMC. This patch enables the ipmitool command to use the usb interface if it is available.
py
diff --git a/openid/interface.py b/openid/interface.py index <HASH>..<HASH> 100644 --- a/openid/interface.py +++ b/openid/interface.py @@ -60,7 +60,7 @@ class ValidLogin(ConsumerResponse): if ret is None: return False - return ret[1] == server_id + return ret[1] == self.identity class InvalidLogin(ConsumerResponse): """This subclass is used when the login wasn't valid."""
[project @ Bug fix in consumer library]
py
diff --git a/future/tests/test_int.py b/future/tests/test_int.py index <HASH>..<HASH> 100644 --- a/future/tests/test_int.py +++ b/future/tests/test_int.py @@ -299,6 +299,12 @@ class IntTestCases(unittest.TestCase): self.assertEqual(int(x), 100, msg=msg) self.assertEqual(int(x, 2), 4, msg=msg) + def test_newint_of_newstr(self): + a = str(u'123') + b = int(a) + self.assertEqual(b, 123) + self.assertTrue(isinstance(b, int)) + def test_string_float(self): self.assertRaises(ValueError, int, '1.2')
Add a test for newint(newstr(u'<I>')), which worked in <I> but is failing now ...
py
diff --git a/tests/test_views.py b/tests/test_views.py index <HASH>..<HASH> 100644 --- a/tests/test_views.py +++ b/tests/test_views.py @@ -67,6 +67,26 @@ class GenerateDataTests(TestCase): actual_list = self.mixin.generate_data(values_list_queryset, fields) assert list(actual_list) == list(expected_list) + def test_follows_foreign_key_with_model_queryset(self): + fields = ('title', 'author__name') + queryset = MockModel.objects.all() + expected_list = [ + (self.mock.title, self.author.name), + (self.mock2.title, self.author.name), + ] + actual_list = self.mixin.generate_data(queryset, fields) + assert list(actual_list) == list(expected_list) + + def test_follows_foreign_key_with_values_list_queryset(self): + fields = ('title', 'author__name') + values_list_queryset = MockModel.objects.all().values_list() + expected_list = [ + (self.mock.title, self.author.name), + (self.mock2.title, self.author.name), + ] + actual_list = self.mixin.generate_data(values_list_queryset, fields) + assert list(actual_list) == list(expected_list) + def test_reverse_ordering_when_fields_specified(self): fields = ('title', 'id') actual_list = self.mixin.generate_data(self.queryset, fields)
Test that we can extract data by following a ForeignKey relation.
py
diff --git a/stone/target/obj_c_types.py b/stone/target/obj_c_types.py index <HASH>..<HASH> 100644 --- a/stone/target/obj_c_types.py +++ b/stone/target/obj_c_types.py @@ -995,7 +995,6 @@ class ObjCTypesGenerator(ObjCBaseGenerator): import_classes = [ fmt_routes_class(namespace.name), fmt_route_obj_class(namespace.name), - 'DBTransportClient', 'DBStoneBase', 'DBRequestErrors', ]
[Obj-C] removed transport client import.
py
diff --git a/geomet/tests/wkt_test.py b/geomet/tests/wkt_test.py index <HASH>..<HASH> 100644 --- a/geomet/tests/wkt_test.py +++ b/geomet/tests/wkt_test.py @@ -112,10 +112,10 @@ class LineStringTestCase(unittest.TestCase): self.assertEqual(expected, wkt.dumps(ls, decimals=3)) def test_loads_linestring_2d(self): - ls = 'LINESTRING (0 1, 2 3, 4 5)' - expected = dict(type='LineString', coordinates=[[0.0, 1.0], - [2.0, 3.0], - [4.0, 5.0]]) + ls = 'LINESTRING (0 -1, -2 -3, -4 5)' + expected = dict(type='LineString', coordinates=[[0.0, -1.0], + [-2.0, -3.0], + [-4.0, 5.0]]) self.assertEqual(expected, wkt.loads(ls)) def test_loads_linestring_3d(self):
tests/wkt_test: Added negative test values to test_loads_linestring_2d.
py
diff --git a/vprof/runtime_profile.py b/vprof/runtime_profile.py index <HASH>..<HASH> 100644 --- a/vprof/runtime_profile.py +++ b/vprof/runtime_profile.py @@ -80,3 +80,9 @@ class RuntimeProfile(base_profile.BaseProfile): 'totalCalls': cprofile_stats.total_calls, 'callStats': self._transform_stats(cprofile_stats) } + + def run(self): + """Runs profile and returns collected stats.""" + runtime_stats = {} + self.collect_stats(runtime_stats) + return runtime_stats
Collect runtime stats in vprof process.
py
diff --git a/tests/test_generators.py b/tests/test_generators.py index <HASH>..<HASH> 100644 --- a/tests/test_generators.py +++ b/tests/test_generators.py @@ -318,10 +318,10 @@ class TestDoped(unittest.TestCase): bqm = dimod.generators.random.doped(0.3, 100, seed=506) total = len(bqm.quadratic) afm = sum([val == 1 for val in bqm.quadratic.values()]) - self.assertAlmostEqual(afm / total, 0.3) + self.assertAlmostEqual(afm / total, 0.3, places=2) def test_correct_ratio_fm(self): bqm = dimod.generators.random.doped(0.3, 100, seed=506, fm=False) total = len(bqm.quadratic) fm = sum([val == -1 for val in bqm.quadratic.values()]) - self.assertAlmostEqual(fm / total, 0.3) + self.assertAlmostEqual(fm / total, 0.3, places=2)
change decimal places for doping tests
py
diff --git a/azure-sdk-testutils/setup.py b/azure-sdk-testutils/setup.py index <HASH>..<HASH> 100644 --- a/azure-sdk-testutils/setup.py +++ b/azure-sdk-testutils/setup.py @@ -11,4 +11,5 @@ setup( author_email='[email protected]', url='https://github.com/Azure/azure-sdk-for-python', packages=find_packages(), + long_description="Specific tools for Azure SDK for Python testing" )
Update setup.py (#<I>)
py
diff --git a/rest_framework_json_api/utils.py b/rest_framework_json_api/utils.py index <HASH>..<HASH> 100644 --- a/rest_framework_json_api/utils.py +++ b/rest_framework_json_api/utils.py @@ -191,7 +191,10 @@ def get_related_resource_type(relation): # Django 1.7 relation_model = parent_model_relation.related.model elif hasattr(parent_model_relation, 'field'): - relation_model = parent_model_relation.field.related.model + try: + relation_model = parent_model_relation.field.remote_field.model + except AttributeError: + relation_model = parent_model_relation.field.related.model else: return get_related_resource_type(parent_model_relation) return get_resource_type_from_model(relation_model)
try to use field.remote_field instead of field.related (#<I>)
py
diff --git a/python/rosette/api.py b/python/rosette/api.py index <HASH>..<HASH> 100644 --- a/python/rosette/api.py +++ b/python/rosette/api.py @@ -111,16 +111,16 @@ def _retrying_request(op, url, data, headers): if status < 500: if not REUSE_CONNECTION: HTTP_CONNECTION.close() - return rdata, status - if rdata is not None: - try: - the_json = _my_loads(rdata) - if "message" in the_json: - message = the_json["message"] - if "code" in the_json: - code = the_json["code"] - except: - pass + return rdata, status + if rdata is not None: + try: + the_json = _my_loads(rdata) + if "message" in the_json: + message = the_json["message"] + if "code" in the_json: + code = the_json["code"] + except: + pass # If there are issues connecting to the API server, # try to regenerate the connection as long as there are # still retries left.
Updated python api again
py
diff --git a/autotweet/learning.py b/autotweet/learning.py index <HASH>..<HASH> 100644 --- a/autotweet/learning.py +++ b/autotweet/learning.py @@ -149,9 +149,12 @@ class DataCollection(object): """ if not grams: - grams = session.query(Gram).all() + grams = session.query(Gram) for gram in grams: + orig_idf = gram.idf gram.idf = self._get_idf(session, gram) + logger.debug('Recalculating {} {} -> {}'.format( + gram.gram, orig_idf, gram.idf)) def get_count(self): """Get count of :class:`Document`.
Enhance logging for recalculation
py
diff --git a/api2.py b/api2.py index <HASH>..<HASH> 100644 --- a/api2.py +++ b/api2.py @@ -23,11 +23,17 @@ class Canteen(Api2Entity): self.fromJsonDict(values) @staticmethod - def find(ids=None): + def find(ids=None, near=None): + recvCanteens = lambda **kwargs: list(map(lambda c: Canteen(values=c), + Canteen().request('canteens', params=kwargs))) if ids: ids = ','.join(map(lambda i: str(i), ids)) - cs = Canteen().request('canteens', params = { 'ids': ids }) - return list(map(lambda c: Canteen(values=c), cs)) + return recvCanteens(ids=ids) + if near is not None: + params = { 'near[lat]': near[0], 'near[lng]': near[1] } + if len(near) > 2: + params['near[dist]'] = near[2] + return recvCanteens(**params) raise NotImplemented def __str__(self):
api2: support canteens near filtering
py
diff --git a/benchexec/runexecutor.py b/benchexec/runexecutor.py index <HASH>..<HASH> 100644 --- a/benchexec/runexecutor.py +++ b/benchexec/runexecutor.py @@ -364,6 +364,7 @@ class RunExecutor(): stdin=DEVNULL, stdout=outputFile, stderr=outputFile, env=runningEnv, cwd=workingDir, + close_fds=True, preexec_fn=preSubprocess) except OSError as e:
Prevent inheriting file descriptors to the benchmarked tool. This is the default since Python <I>, but we want it always.
py
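A standalone illustration of the same idea (not benchexec's code): passing close_fds=True explicitly guarantees the child inherits nothing beyond stdin/stdout/stderr, regardless of what the running Python version defaults to.

import subprocess
import sys

proc = subprocess.Popen(
    [sys.executable, '-c', 'pass'],
    stdin=subprocess.DEVNULL,
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    close_fds=True,  # never inherit extra file descriptors, whatever the default
)
proc.communicate()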
diff --git a/tools/dfu.py b/tools/dfu.py index <HASH>..<HASH> 100755 --- a/tools/dfu.py +++ b/tools/dfu.py @@ -60,6 +60,10 @@ def build(file,targets,device=DEFAULT_DEVICE): for t,target in enumerate(targets): tdata = b'' for image in target: + # pad image to 8 bytes (needed at least for L476) + pad = (8 - len(image['data']) % 8 ) % 8 + image['data'] = image['data'] + bytes(bytearray(8)[0:pad]) + # tdata += struct.pack('<2I',image['address'],len(image['data']))+image['data'] tdata = struct.pack('<6sBI255s2I',b'Target',0,1, b'ST...',len(tdata),len(target)) + tdata data += tdata
tools/dfu.py: Pad image data to 8 byte alignment to support L<I>.
py
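The padding arithmetic in the patch is (8 - len % 8) % 8, which yields 0..7 zero bytes so the image length becomes a multiple of 8. A Python 3 rendering (the diff's bytes(bytearray(8)[0:pad]) spelling is just a Python 2-compatible way to build the same zero padding):

def pad_to_8(data: bytes) -> bytes:
    pad = (8 - len(data) % 8) % 8
    return data + bytes(pad)

assert len(pad_to_8(b'\x01' * 5)) == 8   # 5 bytes -> pad 3
assert len(pad_to_8(b'\x01' * 8)) == 8   # already aligned -> pad 0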
diff --git a/sat_image/image.py b/sat_image/image.py index <HASH>..<HASH> 100644 --- a/sat_image/image.py +++ b/sat_image/image.py @@ -1,3 +1,4 @@ +# coding: utf-8 # ============================================================================================= # Copyright 2017 dgketchum #
added '# coding: utf-8' to the top of the file; a docstring was raising a SyntaxError. Also preparing to change the Raster.bounds call from transform=transform to transform=affine: transform is a list, while affine is an Affine object
py
diff --git a/proj/__init__.py b/proj/__init__.py index <HASH>..<HASH> 100755 --- a/proj/__init__.py +++ b/proj/__init__.py @@ -63,7 +63,10 @@ def archive(folder, dry_run=False): def _last_modified(folder): try: - return max(_time_modified(f) for f in _iter_files(folder)) + return max( + _time_modified(f) for f in _iter_files(folder) if not os.path.islink(f) + ) + except ValueError: bail("no files in folder: " + folder)
Avoid timestamping symlinks, which can be broken
py
diff --git a/sitetree/management/commands/sitetreeload.py b/sitetree/management/commands/sitetreeload.py index <HASH>..<HASH> 100644 --- a/sitetree/management/commands/sitetreeload.py +++ b/sitetree/management/commands/sitetreeload.py @@ -79,8 +79,14 @@ class Command(BaseCommand): tree_item_parents = defaultdict(list) tree_items_new_indexes = {} + try: + allow_migrate = router.allow_migrate + except AttributeError: + # Django < 1.7 + allow_migrate = router.allow_syncdb + for obj in objects: - if router.allow_syncdb(using, obj.object.__class__): + if allow_migrate(using, obj.object.__class__): if isinstance(obj.object, (MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS)): if isinstance(obj.object, MODEL_TREE_CLASS): trees.append(obj.object)
fix sitetreeload management command for py<I> and django <I>
py
diff --git a/sh.py b/sh.py index <HASH>..<HASH> 100644 --- a/sh.py +++ b/sh.py @@ -987,8 +987,6 @@ class OProc(object): def setwinsize(fd): rows, cols = OProc._default_window_size TIOCSWINSZ = getattr(termios, 'TIOCSWINSZ', -2146929561) - if TIOCSWINSZ == 2148037735: # L is not required in Python >= 2.2. - TIOCSWINSZ = -2146929561 # Same bits, but with sign. s = struct.pack('HHHH', rows, cols, 0, 0) fcntl.ioctl(fd, TIOCSWINSZ, s)
Pull in pexpect issue #<I>. See discussion in <URL>
py
diff --git a/web3/contract.py b/web3/contract.py index <HASH>..<HASH> 100644 --- a/web3/contract.py +++ b/web3/contract.py @@ -474,6 +474,10 @@ class Contract: encode_abi(cls.web3, constructor_abi, arguments, data=cls.bytecode) ) else: + if args is not None or kwargs is not None: + msg = "Constructor args were provided, but no constructor function was provided." + raise TypeError(msg) + deploy_data = to_hex(cls.bytecode) return deploy_data
bug: raise exception instead of silently ignoring unnecessary args
py
diff --git a/mapchete/formats/default/raster_file.py b/mapchete/formats/default/raster_file.py index <HASH>..<HASH> 100644 --- a/mapchete/formats/default/raster_file.py +++ b/mapchete/formats/default/raster_file.py @@ -114,7 +114,8 @@ class InputData(base.InputData): ) # If soucre and target CRSes differ, segmentize and reproject if inp_crs != out_crs: - segmentize = _get_segmentize_value(self.path, self.pyramid) + # estimate segmentize value (raster pixel size * tile size) + segmentize = inp.transform[0] * self.pyramid.tile_size ogr_bbox = ogr.CreateGeometryFromWkb(bbox.wkb) ogr_bbox.Segmentize(segmentize) self._bbox_cache[str(out_crs)] = reproject_geometry(
avoid opening file again with rasterio
py
diff --git a/python/setup.py b/python/setup.py index <HASH>..<HASH> 100644 --- a/python/setup.py +++ b/python/setup.py @@ -14,7 +14,7 @@ setup( author_email="[email protected]", url="http://github.com/jbenet/nanotime/tree/master/python", keywords=["nanotime", "nanosecond", "time precision", "64bit time"], - packages=["nanotime"], + modules=["nanotime"], install_requires=[], license="MIT License" )
it is now a module, not a whole package
py
diff --git a/vel/launcher.py b/vel/launcher.py index <HASH>..<HASH> 100644 --- a/vel/launcher.py +++ b/vel/launcher.py @@ -16,7 +16,7 @@ def main(): parser.add_argument('varargs', nargs='*', metavar='VARARGS', help='Extra options to the command') parser.add_argument('-r', '--run_number', default=0, help="A run number") parser.add_argument('-d', '--device', default='cuda', help="A device to run the model on") - parser.add_argument('-s', '--seed', default=dtm.date.today().year, help="Random seed for the project") + parser.add_argument('-s', '--seed', type=int, default=dtm.date.today().year, help="Random seed for the project") parser.add_argument('--reset', action='store_true', default=False, help="Overwrite existing model storage") args = parser.parse_args()
Force seed to be an int.
py
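Why type=int matters here: the default is already an int, but without the cast a value given on the command line arrives as a string, so downstream seeding code would see inconsistent types. A self-contained check:

import argparse
import datetime as dtm

parser = argparse.ArgumentParser()
parser.add_argument('-s', '--seed', type=int, default=dtm.date.today().year)

assert isinstance(parser.parse_args(['--seed', '123']).seed, int)  # was str before
assert isinstance(parser.parse_args([]).seed, int)                 # default was always int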
diff --git a/lenses/setter.py b/lenses/setter.py index <HASH>..<HASH> 100644 --- a/lenses/setter.py +++ b/lenses/setter.py @@ -4,6 +4,12 @@ import copy @singledispatch def setitem_immutable(self, key, value): + '''Takes an object, a key, and a value and produces a new object + that is a copy of the original but with `value` as the new value of + `key`. + + setitem_immutable(obj, key, obj[key]) == obj + ''' try: self._lens_setitem except AttributeError: @@ -22,6 +28,12 @@ def _tuple_setitem_immutable(self, key, value): @singledispatch def setattr_immutable(self, name, value): + '''Takes an object, a string, and a value and produces a new object + that is a copy of the original but with the attribute called `name` + set to `value`. + + setattr_immutable(obj, 'attr', obj.attr) == obj + ''' try: self._lens_setattr except AttributeError:
added simple docstrings to setter functions
py
diff --git a/conversejs/xmpp.py b/conversejs/xmpp.py index <HASH>..<HASH> 100644 --- a/conversejs/xmpp.py +++ b/conversejs/xmpp.py @@ -212,7 +212,4 @@ def change_password(xmpp_account, new_password): logger.error('Unable to connect to XMPP server.') return False - xmpp_account.password = new_password - xmpp_account.save() - return True
Remove XMPPAccount password changing from change_password
py
diff --git a/models.py b/models.py index <HASH>..<HASH> 100644 --- a/models.py +++ b/models.py @@ -229,7 +229,7 @@ class MixtureDistribution(Mixture, GibbsSampling, Distribution): This makes a Mixture act like a Distribution for use in other compound models ''' - def resample(self,data,niter=25): + def resample(self,data,niter=25,temp=None): # doesn't keep a reference to the data like a model would assert isinstance(data,list) or isinstance(data,np.ndarray) @@ -240,11 +240,11 @@ class MixtureDistribution(Mixture, GibbsSampling, Distribution): self.add_data(data) for itr in range(niter): - self.resample_model() + self.resample_model(temp=temp) self.labels_list.pop() else: - self.resample_model() + self.resample_model(temp=temp) def max_likelihood(self,data,weights=None): if weights is not None: @@ -313,9 +313,9 @@ class FrozenMixtureDistribution(MixtureDistribution): weights=self.weights, likelihoods=self._likelihoods)) - def resample_model(self): + def resample_model(self, temp=None): for l in self.labels_list: - l.resample() + l.resample(temp=temp) self.weights.resample([l.z for l in self.labels_list]) def log_likelihood(self,x):
Temperature now propagates through resample_model calls
py
diff --git a/master/buildbot/reporters/generators/utils.py b/master/buildbot/reporters/generators/utils.py index <HASH>..<HASH> 100644 --- a/master/buildbot/reporters/generators/utils.py +++ b/master/buildbot/reporters/generators/utils.py @@ -47,7 +47,7 @@ class BuildStatusGeneratorMixin(util.ComparableMixin): def check(self): self._verify_build_generator_mode(self.mode) - if '\n' in self.subject: + if self.subject is not None and '\n' in self.subject: config.error('Newlines are not allowed in message subjects') list_or_none_params = [ @@ -165,7 +165,7 @@ class BuildStatusGeneratorMixin(util.ComparableMixin): if buildmsg['subject'] is not None: subject = buildmsg['subject'] - if subject is None: + if subject is None and self.subject is not None: subject = self.subject % {'result': statusToString(results), 'projectName': master.config.title, 'title': master.config.title,
reporters: Support None subject in BuildStatusGeneratorMixin
py
diff --git a/netpyne/network.py b/netpyne/network.py index <HASH>..<HASH> 100644 --- a/netpyne/network.py +++ b/netpyne/network.py @@ -140,8 +140,7 @@ class Network (object): if 'sec' not in connParam: connParam['sec'] = None # if section not specified, make None (will be assigned to first section in cell) if 'synMech' not in connParam: connParam['synMech'] = None # if synaptic mechanism not specified, make None (will be assigned to first synaptic mechanism in cell) if 'threshold' not in connParam: connParam['threshold'] = self.params['defaultThreshold'] # if no threshold specified, make None (will be assigned default value) - if 'seed' not in connParam: connParam['threshold'] = self.params['defaultThreshold'] # if no threshold specified, make None (will be assigned default value) - + if 'weight' not in connParam: connParam['weight'] = self.params['defaultWeight'] # if no weight, set default if 'delay' not in connParam: connParam['delay'] = self.params['defaultDelay'] # if no delay, set default if 'synsPerConn' not in connParam: connParam['synsPerConn'] = 1 # if no delay, set default
removed 'seed' param from conns in network.py (bug)
py
diff --git a/consul/base.py b/consul/base.py index <HASH>..<HASH> 100644 --- a/consul/base.py +++ b/consul/base.py @@ -233,9 +233,8 @@ class Consul(object): *token* is an optional `ACL token`_ to apply to this request. *keys* is a boolean which, if True, says to return a flat list of - keys without values or other metadata. - - *separator* is used to list only up to a given separator character. + keys without values or other metadata. *separator* can be used + with *keys* to list keys only up to a given separator character. *dc* is the optional datacenter that you wish to communicate with. If None is provided, defaults to the agent's datacenter.
update docstring to make it clear separator is used with the keys argument
py
diff --git a/treeherder/log_parser/artifactbuildercollection.py b/treeherder/log_parser/artifactbuildercollection.py index <HASH>..<HASH> 100644 --- a/treeherder/log_parser/artifactbuildercollection.py +++ b/treeherder/log_parser/artifactbuildercollection.py @@ -91,7 +91,7 @@ BuildbotPerformanceDataArtifactBuilder # Temporary annotation of log size to help set thresholds in bug 1295997. newrelic.agent.add_custom_parameter( 'unstructured_log_size', - response.headers.get('Content-Length', 'Unknown') + int(response.headers.get('Content-Length', -1)) ) newrelic.agent.add_custom_parameter( 'unstructured_log_encoding',
Bug <I> - Send the unstructured log size to New Relic as ints New Relic Insights doesn't coerce strings to integers, so doesn't allow the graphing of custom attributes sent as strings. HTTP headers are always exposed as strings, even for fields that are expected to represent numbers, so we must explicitly cast Content-Length.
py
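The cast in isolation: HTTP headers are strings even for numeric fields, and a -1 sentinel keeps the attribute numeric when the header is missing.

headers = {'Content-Length': '2048'}
assert int(headers.get('Content-Length', -1)) == 2048
assert int({}.get('Content-Length', -1)) == -1  # absent header -> numeric sentinel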
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -15,6 +15,21 @@ setup(name='salt', author='Thomas S Hatch', author_email='[email protected]', url='https://github.com/thatch45/salt', + classifiers = [ + 'Programming Language :: Python', + 'Programming Language :: Cython', + 'Programming Language :: Python :: 2.6', + 'Development Status :: 4 - Beta', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Information Technology', + 'Intended Audience :: System Administrators', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: POSIX :: Linux', + 'Topic :: System :: Monitoring', + 'Topic :: System :: Clustering', + 'Topic :: System :: Distributed Computing', + ], packages=['salt', 'salt.modules', 'salt.cli',
Add classifiers to the setup.py
py
diff --git a/taskqueue/aws_queue_api.py b/taskqueue/aws_queue_api.py index <HASH>..<HASH> 100644 --- a/taskqueue/aws_queue_api.py +++ b/taskqueue/aws_queue_api.py @@ -102,7 +102,7 @@ class AWSTaskQueueAPI(object): 'All' ], VisibilityTimeout=visibility_timeout, - WaitTimeSeconds=0, + WaitTimeSeconds=20, ) if 'Messages' not in resp:
fix: use <I> sec wait time to avoid 0-second polling for tasks on AWS
py
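For context, a hedged boto3 sketch (the queue URL is hypothetical; assumes AWS credentials and region are configured): WaitTimeSeconds=20 enables SQS long polling, so an empty queue holds the request open for up to 20 seconds instead of returning immediately and being re-polled in a tight loop.

import boto3

sqs = boto3.client('sqs')
resp = sqs.receive_message(
    QueueUrl='https://sqs.us-east-1.amazonaws.com/123456789012/example-queue',
    VisibilityTimeout=60,
    WaitTimeSeconds=20,  # 20 is the SQS maximum; 0 means return immediately
)
messages = resp.get('Messages', [])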
diff --git a/hupper/winapi.py b/hupper/winapi.py index <HASH>..<HASH> 100644 --- a/hupper/winapi.py +++ b/hupper/winapi.py @@ -13,8 +13,8 @@ class JobObjectInfoType(object): GroupInformation = 11 -class JOBOBJECTLIMIT(object): - JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x2000 +class JobObjectLimit(object): + KILL_ON_JOB_CLOSE = 0x2000 class IO_COUNTERS(ctypes.Structure): @@ -141,7 +141,7 @@ class ProcessGroup(object): self.h_job = CreateJobObject(None, None) info = JOBOBJECT_BASIC_LIMIT_INFORMATION() - info.LimitFlags = JOBOBJECTLIMIT.JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE + info.LimitFlags = JobObjectLimit.KILL_ON_JOB_CLOSE extended_info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION() extended_info.BasicLimitInformation = info @@ -149,7 +149,7 @@ class ProcessGroup(object): SetInformationJobObject( self.h_job, JobObjectInfoType.ExtendedLimitInformation, - extended_info, + ctypes.pointer(extended_info), ctypes.sizeof(extended_info), )
cleanup names and pass a pointer explicitly
py
diff --git a/ohapi/api.py b/ohapi/api.py index <HASH>..<HASH> 100644 --- a/ohapi/api.py +++ b/ohapi/api.py @@ -326,6 +326,24 @@ def handle_error(r, expected_code): def upload_aws(target_filepath, metadata, access_token, base_url=OH_BASE_URL, remote_file_info=None, project_member_id=None, max_bytes=MAX_FILE_DEFAULT): + """ + Upload a file to AWS. To learn more about Open Humans OAuth2 projects, go + to: https://www.openhumans.org/direct-sharing/oauth2-features/. + + :param target_filepath: This field is the filepath of the file to be + uploaded + :param metadata: This field is a python dictionary with keys filename, + description and tags for single user upload and filename, + project member id, description and tags for multiple user upload. + :param access_token: This is user specific access token/master token. + :param base_url: It is this URL `https://www.openhumans.org`. + :param remote_file_info: This field is for for checking if a file with + matching name and file size already exists. Its default value is none. + :param project_member_id: This field is the list of project member id of + all members of a project. Its default value is None. + :param max_bytes: This field is the maximum file size a user can upload. + It's default value is 128m. + """ if remote_file_info: filesize = os.stat(target_filepath).st_size if process_info(remote_file_info, filesize, target_filepath) is False:
Added documentation for upload_aws (#<I>) * project_member_id optional in upload function * resolving code climate issue * project_member_id optional in upload function * fix hound errors * Update api.py * optional project_member_id * Hound fixes and other refactoring * Hound fixes * Hound fixes * Hound fixes * Hound fixes * added tests for get_page * hound fix * added doc for upload_aws * minor fixes
py
diff --git a/alerta/top.py b/alerta/top.py index <HASH>..<HASH> 100644 --- a/alerta/top.py +++ b/alerta/top.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python import sys import time @@ -389,7 +388,8 @@ class Screen(object): elif key in 'oO': self.dedup_by = "origin" elif key in 'qQ': - exit_handler() + self._reset() + sys.exit(0) # def _handle_movement_key(self, key): # # Highlight the corresponding node in the list @@ -419,7 +419,9 @@ class Screen(object): def exit_handler(signum, frame): logging.debug('Received Signal %s (%s)' % (signum, frame)) - screen._reset() + curses.echo() + curses.nocbreak() + curses.endwin() sys.exit(0) # Register exit signals
make screen sane after exiting
py
diff --git a/devassistant/command_runners.py b/devassistant/command_runners.py index <HASH>..<HASH> 100644 --- a/devassistant/command_runners.py +++ b/devassistant/command_runners.py @@ -594,7 +594,7 @@ class SCLCommandRunner(CommandRunner): @classmethod def run(cls, c): c.kwargs['__scls__'].append(c.comm_type.split()[1:]) - retval = lang.run_section(c.comm, + retval = lang.run_section(c.input_res, c.kwargs, runner=c.kwargs['__assistant__']) c.kwargs['__scls__'].pop()
Fix SCLCommandRunner with new Yaml syntax
py
diff --git a/python/orca/src/bigdl/orca/common.py b/python/orca/src/bigdl/orca/common.py index <HASH>..<HASH> 100644 --- a/python/orca/src/bigdl/orca/common.py +++ b/python/orca/src/bigdl/orca/common.py @@ -155,7 +155,7 @@ def init_orca_context(cluster_mode="local", cores=2, memory="2g", num_nodes=1, executor_memory=memory, **spark_args) elif cluster_mode == "standalone": for key in ["driver_cores", "driver_memory", "extra_executor_memory_for_ray", - "extra_python_lib", "jars", "master"]: + "extra_python_lib", "jars", "master", "enable_numa_binding"]: if key in kwargs: spark_args[key] = kwargs[key] from zoo import init_spark_standalone
Support numa binding in init_spark_standalone (#<I>) * support numa binding in init_spark_standalone * add doc and add to orca context * address comments * address comments * update scripts * hyperthreading * fix
py
diff --git a/spur/results.py b/spur/results.py index <HASH>..<HASH> 100644 --- a/spur/results.py +++ b/spur/results.py @@ -1,8 +1,9 @@ def result(return_code, output, stderr_output, allow_error=False): + result = ExecutionResult(return_code, output, stderr_output) if allow_error or return_code == 0: - return ExecutionResult(return_code, output, stderr_output) + return result else: - raise RunProcessError(return_code, output, stderr_output) + raise result.to_error() class RunProcessError(RuntimeError): @@ -20,3 +21,10 @@ class ExecutionResult(object): self.return_code = return_code self.output = output self.stderr_output = stderr_output + + def to_error(self): + return RunProcessError( + self.return_code, + self.output, + self.stderr_output + )
Move logic for creating RunProcessError to ExecutionResult.to_error
py
diff --git a/digsandpaper/search_server.py b/digsandpaper/search_server.py index <HASH>..<HASH> 100644 --- a/digsandpaper/search_server.py +++ b/digsandpaper/search_server.py @@ -111,10 +111,10 @@ def _index_fields(request): if (request.headers['Content-Type'] == 'application/x-gzip'): gz_data_as_file = StringIO.StringIO(request.data) uncompressed = gzip.GzipFile(fileobj=gz_data_as_file, mode='rb') - jls = uncompressed.read().decode('utf-8') + jls = uncompressed.read() elif (request.headers['Content-Type'] == 'application/json' or request.headers['Content-Type'] == 'application/x-jsonlines'): - jls = request.data.decode('utf-8') + jls = request.data else: return "Only supported content types are application/x-gzip, application/json and application/x-jsonlines", status.HTTP_400_BAD_REQUEST reader = codecs.getreader('utf-8')
Remove unnecessary utf-8 decode
py
diff --git a/tests/settings/__init__.py b/tests/settings/__init__.py index <HASH>..<HASH> 100644 --- a/tests/settings/__init__.py +++ b/tests/settings/__init__.py @@ -17,3 +17,5 @@ INSTALLED_APPS = [ ] TEST_RUNNER = 'tests.runners.MutantTestSuiteRunner' + +SILENCED_SYSTEM_CHECKS = ['1_7.W001']
Silenced a system check.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -29,7 +29,7 @@ def fontbakery_scripts(): setup( name="fontbakery", - version='0.3.1', + version='0.3.2', url='https://github.com/googlefonts/fontbakery/', description='Font Bakery is a set of command-line tools' ' for testing font projects',
version bump in preparation for an imminent pypi release
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ def read(fname): install_requires = [ 'Django>=1.4', - 'PyScss>=1.2.0,<=1.3.0', + 'PyScss>=1.2.0,<1.3.0', ] tests_require = [ 'Pillow',
Fix bad pinning. We don't want to install <I>. Really fixes #<I>.
py
diff --git a/dedupe/core.py b/dedupe/core.py index <HASH>..<HASH> 100644 --- a/dedupe/core.py +++ b/dedupe/core.py @@ -32,13 +32,14 @@ def randomPairs(n_records, sample_size): n = int(n_records * (n_records - 1) / 2) if sample_size >= n : - random_pairs = numpy.arange(n) + random_pairs = numpy.arange(n, dtype='uint') else: - random_pairs = numpy.array(random.sample(range(n), sample_size)) + random_pairs = numpy.array(random.sample(range(n), sample_size), + dtype='uint') b = 1 - 2 * n_records - i = numpy.floor((-b - numpy.sqrt(b ** 2 - 8 * random_pairs)) / 2).astype('uint') + i = numpy.floor((-b - 2 * numpy.sqrt(2 * (n - random_pairs) + 0.25)) / 2).astype('uint') j = numpy.rint(random_pairs + i * (b + i + 2) / 2 + 1).astype('uint') return zip(i, j)
try to avoid numeric overflow in randomPairs
py
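The rewritten formula is algebraically the same mapping from a flat pair index k to (i, j): with b = 1 - 2*n_records, b**2 - 8*k equals 8*(n - k) + 1, so 2*sqrt(2*(n - k) + 0.25) == sqrt(b**2 - 8*k) without ever forming the huge b**2 intermediate. A brute-force check for a small n_records (the helper name is illustrative):

import numpy

def pair_from_index(n_records, k):
    b = 1 - 2 * n_records
    n = n_records * (n_records - 1) // 2
    i = int(numpy.floor((-b - 2 * numpy.sqrt(2 * (n - k) + 0.25)) / 2))
    j = int(numpy.rint(k + i * (b + i + 2) / 2 + 1))
    return i, j

n_records = 7
n = n_records * (n_records - 1) // 2
pairs = {pair_from_index(n_records, k) for k in range(n)}
assert pairs == {(i, j) for i in range(n_records) for j in range(i + 1, n_records)}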
diff --git a/rstcheck/__init__.py b/rstcheck/__init__.py index <HASH>..<HASH> 100755 --- a/rstcheck/__init__.py +++ b/rstcheck/__init__.py @@ -397,6 +397,8 @@ def _get_directives_and_roles_from_sphinx() -> typing.Tuple[typing.List[str], ty sphinx_roles += list( sphinx.application.docutils.roles._roles # pylint: disable=protected-access ) + sphinx_directives.remove("code") + sphinx_directives.remove("code-block") return (sphinx_directives, sphinx_roles)
remove code and code-block from sphinx ignore list
py
diff --git a/tests/attrel/check_attrel.py b/tests/attrel/check_attrel.py index <HASH>..<HASH> 100755 --- a/tests/attrel/check_attrel.py +++ b/tests/attrel/check_attrel.py @@ -12,6 +12,7 @@ def check_attrel(attrel_cls, filename, max_discrep_percentage, max_errors=0, verbose=False): reader = csv.reader(open(filename)) attrel = attrel_cls() + context_params = set(AttRelContext.__slots__) linenum = 1 errors = 0 @@ -41,7 +42,7 @@ def check_attrel(attrel_cls, filename, max_discrep_percentage, damping = float(value) elif param == 'component_type': component_type = getattr(const.IMC, value) - elif hasattr(context, param): + elif param in context_params: # value is context object attribute if param == 'site_vs30type': value = getattr(const.VS30T, value)
tests/attrel/check_attrel: better way to check if csv column has value from calculation context
py
diff --git a/builder.py b/builder.py index <HASH>..<HASH> 100755 --- a/builder.py +++ b/builder.py @@ -1585,6 +1585,11 @@ class Sample(Pmag_object): if self.site.er_data['site_' + dtype]: value = self.site.er_data['site_' + dtype] self.er_data['sample_' + dtype] = value + for dtype in ['_lat', '_lon']: + if 'sample' + dtype in self.er_data.keys(): + if not self.er_data['sample' + dtype]: + if 'site' + dtype in self.site.er_data.keys(): + self.er_data['sample' + dtype] = self.site.er_data['site' + dtype]
automatically grab site latitudes/longitudes and apply them to samples if the samples don't have latitudes/longitudes of their own
py
diff --git a/test/testutils.py b/test/testutils.py index <HASH>..<HASH> 100644 --- a/test/testutils.py +++ b/test/testutils.py @@ -28,7 +28,7 @@ def gen_random_name(): def gen_random_version(): - return random.choice(string.digits) + '.' + random.choice(string.digits) + return ''.join(random.choice(string.digits)for _ in range(10)) + '.' + ''.join(random.choice(string.digits) for _ in range(10)) def assert_raises_valueerror(api, function, **kwargs):
ensure that the chance of version collisions is mitigated
py
diff --git a/allennlp/commands/main.py b/allennlp/commands/main.py index <HASH>..<HASH> 100644 --- a/allennlp/commands/main.py +++ b/allennlp/commands/main.py @@ -5,8 +5,11 @@ import allennlp.commands.serve as serve import allennlp.commands.predict as predict import allennlp.commands.train as train import allennlp.commands.evaluate as evaluate +from allennlp.common.checks import ensure_pythonhashseed_set def main(raw_args: Sequence[str]) -> None: + ensure_pythonhashseed_set() + parser = argparse.ArgumentParser(description="Run AllenNLP", usage='%(prog)s [command]') subparsers = parser.add_subparsers(title='Commands', metavar='')
Add check for PYTHONHASHSEED to run. (#<I>)
py
diff --git a/rest_framework_nested/__init__.py b/rest_framework_nested/__init__.py index <HASH>..<HASH> 100644 --- a/rest_framework_nested/__init__.py +++ b/rest_framework_nested/__init__.py @@ -12,3 +12,4 @@ # See the License for the specific language governing permissions and # limitations under the License. +__version__ = '0.12.0'
__version__ in __init__ is the new place for the version
py
diff --git a/eth_utils/encoding.py b/eth_utils/encoding.py index <HASH>..<HASH> 100644 --- a/eth_utils/encoding.py +++ b/eth_utils/encoding.py @@ -1,10 +1,6 @@ -import math - - def int_to_big_endian(value): - byte_length = max(math.ceil(value.bit_length() / 8), 1) - return value.to_bytes(byte_length, byteorder='big') + return value.to_bytes((value.bit_length() + 7) // 8 or 1, 'big') def big_endian_to_int(value): - return int.from_bytes(value, byteorder='big') + return int.from_bytes(value, 'big')
Faster int_to_big_endian implementation
py
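The trick spelled out: (bit_length() + 7) // 8 rounds up to whole bytes, and 'or 1' covers zero, whose bit_length() is 0 but which still needs one byte.

def int_to_big_endian(value):
    return value.to_bytes((value.bit_length() + 7) // 8 or 1, 'big')

assert int_to_big_endian(0) == b'\x00'
assert int_to_big_endian(255) == b'\xff'
assert int_to_big_endian(256) == b'\x01\x00'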
diff --git a/horizon/loaders.py b/horizon/loaders.py index <HASH>..<HASH> 100644 --- a/horizon/loaders.py +++ b/horizon/loaders.py @@ -29,7 +29,7 @@ class TemplateLoader(BaseLoader): is_usable = True def get_template_sources(self, template_name): - bits = template_name.split(os.path.sep, 2) + bits = template_name.split('/', 2) if len(bits) == 3: dash_name, panel_name, remainder = bits key = os.path.join(dash_name, panel_name)
Always split template names on forward slash Template names in Django always use forward slash, even on Windows, so to parse them properly, split on forward slash instead of os.path.sep which is '\\' on Windows. Change-Id: Ib<I>b<I>f<I>aa1e<I>fed<I>cbac5d<I>b<I>e Closes-Bug: #<I>
py
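A quick demonstration of the difference (the template name is made up): Django template names use '/' on every platform, while os.path.sep is '\\' on Windows, so splitting on os.path.sep there would leave the whole name in one piece and the loader would never see the dashboard/panel structure.

name = 'mydash/mypanel/index.html'
assert name.split('/', 2) == ['mydash', 'mypanel', 'index.html']
# On Windows, name.split(os.path.sep, 2) == ['mydash/mypanel/index.html']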
diff --git a/edc_permissions/permissions_inspector.py b/edc_permissions/permissions_inspector.py index <HASH>..<HASH> 100644 --- a/edc_permissions/permissions_inspector.py +++ b/edc_permissions/permissions_inspector.py @@ -169,6 +169,12 @@ class PermissionsInspector: return {'unexpected': [x for x in existing if x not in defaults], 'missing': [x for x in defaults if x not in existing]} + def remove_codenames(self, group_name=None, codenames=None): + group = self.group_model_cls().objects.get(name=group_name) + deleted = group.permissions.filter( + group__name=group_name, codename__in=codenames).delete() + return deleted + def validate_pii(self): """Ensure PII codenames not in any other group. """
add method to inspector to delete unused codenames
py
diff --git a/salt/modules/zypper.py b/salt/modules/zypper.py index <HASH>..<HASH> 100644 --- a/salt/modules/zypper.py +++ b/salt/modules/zypper.py @@ -195,6 +195,7 @@ class Zypper(object): # Zypper call will stuck here waiting, if another zypper hangs until forever. # However, Zypper lock needs to be always respected. + was_blocked = False while True: log.debug("Calling Zypper: " + ' '.join(self.__cmd)) self.__call_result = __salt__['cmd.run_all'](self.__cmd, **kwargs) @@ -224,7 +225,13 @@ class Zypper(object): __salt__['event.fire_master'](data, self.TAG_BLOCKED) log.debug("Fired a Zypper blocked event to the master with the data: {0}".format(str(data))) time.sleep(5) + if not was_blocked: + was_blocked = True + if was_blocked: + __salt__['event.fire_master']({'success': not len(self.error_msg), + 'info': self.error_msg or 'Zypper has been released'}, + self.TAG_RELEASED) if self.error_msg: raise CommandExecutionError('Zypper command failure: {0}'.format(self.error_msg))
Fire an event when Zypper is released, including the result
py
diff --git a/userena/urls.py b/userena/urls.py index <HASH>..<HASH> 100644 --- a/userena/urls.py +++ b/userena/urls.py @@ -102,7 +102,7 @@ urlpatterns = patterns('', name='userena_profile_edit'), # View profiles - url(r'^(?P<username>(?!signout|signup|signin)[\@\.\w-]+)/$', + url(r'^(?P<username>(?!(signout|signup|signin)/)[\@\.\w-]+)/$', userena_views.profile_detail, name='userena_profile_detail'), url(r'^page/(?P<page>[0-9]+)/$',
urls: update negative lookahead assertion in userena_profile_detail pattern Fixes #<I> by allowing user profile detail URLs to contain usernames **starting** with 'signup', 'signout', or 'signin' (see the example below)
py
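The behavioural difference, checked directly against the new pattern (the usernames are made up): reserved routes still fall through to their own views, but a username that merely starts with a reserved word now resolves to a profile.

import re

pattern = re.compile(r'^(?P<username>(?!(signout|signup|signin)/)[\@\.\w-]+)/$')

assert pattern.match('signup/') is None                                 # real signup view wins
assert pattern.match('signup2000/').group('username') == 'signup2000'   # now allowed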
diff --git a/napalm/junos/junos.py b/napalm/junos/junos.py index <HASH>..<HASH> 100644 --- a/napalm/junos/junos.py +++ b/napalm/junos/junos.py @@ -948,7 +948,6 @@ class JunOSDriver(NetworkDriver): 'type': py23_compat.text_type, 'apply_groups': list, 'remove_private_as': bool, - 'cluster': py23_compat.text_type, 'multipath': bool, 'multihop_ttl': int }
remove cluster from the output Conflicts: napalm/junos/junos.py
py
diff --git a/matplotlib2tikz.py b/matplotlib2tikz.py index <HASH>..<HASH> 100644 --- a/matplotlib2tikz.py +++ b/matplotlib2tikz.py @@ -1198,6 +1198,12 @@ def _draw_path(obj, data, path, ): '''Adds code for drawing an ordinary path in PGFPlots (TikZ). ''' + if 'draw=white' in draw_options and 'fill opacity=0' in draw_options: + # For some reasons, matplotlib sometimes adds void paths with only + # consist of one point, are white, and have no opacity. To not let + # those clutter the output TeX file, bail out here. + return data, '' + nodes = [] prev = None for vert, code in path.iter_segments():
don't convert void paths introduced by matplotlib
py
diff --git a/sandstone/scripts/run_client_tests.py b/sandstone/scripts/run_client_tests.py index <HASH>..<HASH> 100644 --- a/sandstone/scripts/run_client_tests.py +++ b/sandstone/scripts/run_client_tests.py @@ -36,6 +36,7 @@ dep_list = [ 'ui.router', 'sandstone.acemodes', 'ui.bootstrap', + 'sandstone.broadcastservice' ] for spec in settings.APP_SPECIFICATIONS:
Added broadcastservice to client test dep list.
py
diff --git a/scripts/experiments/run_ace2.py b/scripts/experiments/run_ace2.py index <HASH>..<HASH> 100755 --- a/scripts/experiments/run_ace2.py +++ b/scripts/experiments/run_ace2.py @@ -622,13 +622,20 @@ class SrlExpParamsRunner(ExpParamsRunner): inference="BP", cacheType="NONE", useRelationsForNePairs=False) + # Use best hyperparameters from ace-pm13_014. + defaults.update(adaGradInitialSumSquares=1, + embScalar=1, + adaGradEta=0.1, + l2variance=400000, + sgdAutoSelectLr=False, + sgdNumPasses=20, + ) defaults.set_incl_name("testPredOut", False) - # Train on all domains. - train = get_annotation_as_train(ace05_all_nobctest) # TODO: This should be all domains - dev = ReExpParams(propTrainAsDev=0.05) - test = get_annotation_as_test(ace05_bc_test) - exp_train = defaults + train + dev + test + # Train on all domains except bc_test. Use bc_test as dev. + train = get_annotation_as_train(ace05_all_nobctest) + dev = get_annotation_as_dev(ace05_bc_test) + exp_train = defaults + train + dev exp_train.update(pipeOut="pipe.binary.gz") root.add_dependent(exp_train)
Updating hyperparams for ace-agiga2
py
diff --git a/lib/autokey/scripting/engine.py b/lib/autokey/scripting/engine.py index <HASH>..<HASH> 100644 --- a/lib/autokey/scripting/engine.py +++ b/lib/autokey/scripting/engine.py @@ -19,7 +19,7 @@ from collections.abc import Iterable from typing import Tuple, Optional, List, Union -from autokey import model, iomediator +from autokey import model, iomediator, configmanager class Engine: @@ -217,6 +217,12 @@ Folders created within temporary folders must themselves be set temporary") self.check_abbreviation_unique(abbreviations) if not replaceExistingHotkey: self.check_hotkey_unique(hotkey) + else: + existing_item = self.get_item_with_hotkey(hotkey) + if not isinstance(existing_item, configmanager.configmanager.GlobalHotkey): + existing_item.unset_hotkey() + + self.monitor.suspend() try: @@ -459,6 +465,13 @@ Folders created within temporary folders must themselves be set temporary") if not self.configManager.check_hotkey_unique(modifiers, hotkey[1], None, None)[0]: raise ValueError("The specified hotkey and modifier combination is already in use: {}".format(hotkey)) + def get_item_with_hotkey(self, hotkey): + if not hotkey: + return + modifiers = sorted(hotkey[0]) + return self.configManager.get_item_with_hotkey(modifiers, hotkey[1]) + + def validateAbbreviations(abbreviations): if abbreviations is None:
Allow new phrases to override existing hotkey
py
diff --git a/fandjango/middleware.py b/fandjango/middleware.py index <HASH>..<HASH> 100644 --- a/fandjango/middleware.py +++ b/fandjango/middleware.py @@ -97,6 +97,8 @@ class FacebookMiddleware(): user.oauth_token.save() user.save() + finally: + user.oauth_token.extend() request.facebook.user = user
Extend the OAuth token for new and existing users alike
py
diff --git a/turbolift/utils.py b/turbolift/utils.py index <HASH>..<HASH> 100644 --- a/turbolift/utils.py +++ b/turbolift/utils.py @@ -580,7 +580,7 @@ def restor_perms(local_file, headers): os.chmod( local_file, - int(headers['x-object-meta-perms']) + int(headers['x-object-meta-perms'], 8) ) # Lookup user and group name and restore them.
change int to oct for perms restore
py
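The difference in one line: stored permission strings such as '755' are octal, so int('755') would yield decimal 755 while int('755', 8) yields 493 == 0o755, the value os.chmod expects.

assert int('755', 8) == 0o755 == 493
assert int('644', 8) == 0o644 == 420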
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -35,7 +35,6 @@ setup( url='https://github.com/DMSC-Instrument-Data/lewis', author='Michael Hart, Michael Wedel, Owen Arnold', author_email='Michael Hart <[email protected]>, ' - 'Michael Wedel <[email protected]>, ' 'Owen Arnold <[email protected]>', license='GPL v3', classifiers=[
Removed email as it is no longer valid
py
diff --git a/panels/_version.py b/panels/_version.py index <HASH>..<HASH> 100644 --- a/panels/_version.py +++ b/panels/_version.py @@ -1,2 +1,2 @@ # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440 -__version__ = "0.0.18" +__version__ = "0.0.19"
Update version number to <I>
py
diff --git a/Lib/glyphsLib/classes.py b/Lib/glyphsLib/classes.py index <HASH>..<HASH> 100755 --- a/Lib/glyphsLib/classes.py +++ b/Lib/glyphsLib/classes.py @@ -1734,6 +1734,17 @@ class GSComponent(GSBase): self.transform = [affine[0], affine[1], affine[3], affine[4], affine[2], affine[5]] @property + def componentName(self): + return self.name + @componentName.setter + def componentName(self, value): + self.name = value + + @property + def component(self): + return self.parent.parent.parent.glyphs[self.name] + + @property def layer(self): return self.parent.parent.parent.glyphs[self.name].layers[self.parent.layerId]
Implemented GSComponent.component and .componentName
py
diff --git a/dividebatur/senatecount.py b/dividebatur/senatecount.py index <HASH>..<HASH> 100755 --- a/dividebatur/senatecount.py +++ b/dividebatur/senatecount.py @@ -48,6 +48,11 @@ class SenateCountPost2015: return Ticket((PreferenceFlow(tuple(prefs)), )) def btl_flow(form): + if self.s282_candidates: + # s282: only 273(7) to (30) apply, so don't exclude informal BTL votes + min_prefs = 1 + else: + min_prefs = 6 by_pref = {} for pref, candidate_id in zip(form, self.flows.btl): if pref is None: @@ -66,7 +71,7 @@ class SenateCountPost2015: continue prefs.append((len(prefs) + 1, candidate_id)) # must have unique prefs for 1..6, or informal - if len(prefs) < 6: + if len(prefs) < min_prefs: return None return Ticket((PreferenceFlow(tuple(prefs)), ))
fix s<I> implementation. Talking with deanashley<I> on Twitter, it seems BTL ballots shouldn't be excluded as informal (no clear 1-6) after the renumbering is applied; s<I> actually skips the exclusion-of-informal-ballots step. (thanks Dean!)
py
diff --git a/marathon/models/app.py b/marathon/models/app.py index <HASH>..<HASH> 100644 --- a/marathon/models/app.py +++ b/marathon/models/app.py @@ -94,7 +94,7 @@ class MarathonApp(MarathonResource): tasks_healthy=None, task_kill_grace_period_seconds=None, tasks_unhealthy=None, upgrade_strategy=None, unreachable_strategy=None, uris=None, user=None, version=None, version_info=None, ip_address=None, fetch=None, task_stats=None, readiness_checks=None, - readiness_check_results=None, secrets=None, port_definitions=None, residency=None, gpus=None): + readiness_check_results=None, secrets=None, port_definitions=None, residency=None, gpus=None, networks=None): # self.args = args or [] self.accepted_resource_roles = accepted_resource_roles @@ -184,6 +184,8 @@ class MarathonApp(MarathonResource): else MarathonAppVersionInfo.from_json(version_info) self.task_stats = task_stats if (isinstance(task_stats, MarathonTaskStats) or task_stats is None) \ else MarathonTaskStats.from_json(task_stats) + self.networks = networks + def add_env(self, key, value): self.env[key] = value
Accept the "networks" key in the JSON received from Marathon
py
diff --git a/artist/multi_plot.py b/artist/multi_plot.py index <HASH>..<HASH> 100644 --- a/artist/multi_plot.py +++ b/artist/multi_plot.py @@ -31,7 +31,7 @@ class MultiPlot: self.subplots = [] for i in range(rows): for j in range(columns): - self.subplots.append(SubPlot(i, j)) + self.subplots.append(SubPlotContainer(i, j)) def set_empty(self, row, column): subplot = self.get_subplot_at(row, column) @@ -253,7 +253,7 @@ class MultiPlot: return 'normal', 'normal' -class SubPlot: +class SubPlotContainer: def __init__(self, row, column): self.row = row self.column = column
Renamed SubPlot -> SubPlotContainer
py
diff --git a/contentfiles/storage.py b/contentfiles/storage.py index <HASH>..<HASH> 100644 --- a/contentfiles/storage.py +++ b/contentfiles/storage.py @@ -44,4 +44,5 @@ class PrivateStorage(ContentFilesMixin, LibCloudPrivateStorage): def url(self, name): protocol = 'https' if CONTENTFILES_SSL else 'http' return '%s://%s/%s/%s/%s' % ( - protocol, self.driver.connection.host, self.bucket, self.path_name, name) + protocol, self.driver.connection.host, self.bucket, self.path_name, + urllib.parse.quote(name))
Didn't quote the private storage file name
py
diff --git a/cassandra/cluster.py b/cassandra/cluster.py index <HASH>..<HASH> 100644 --- a/cassandra/cluster.py +++ b/cassandra/cluster.py @@ -317,7 +317,7 @@ class Cluster(object): if not self._is_setup: self.load_balancing_policy.populate( - weakref.proxy(self), self.metadata.getAllHosts()) + weakref.proxy(self), self.metadata.all_hosts()) self._is_setup = True if self.control_connection:
Fix bad load balancing policy population
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +# -*- coding: utf-8 -*- ''' Execution:
Should I add unicode support to setup.py? Sure, why not? What could possibly go wrong?
py