{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n \n \"\"\""},"message":{"kind":"string","value":"Fix indentation in Flask example."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278094,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 
100644\n--- a/setup.py\n+++ b/setup.py\n@@ -1,6 +1,6 @@\n from distutils.core import setup\n \n-setup(name='pytds',\n+setup(name='python-tds',\n version='0.1',\n description='Python DBAPI driver for MSSQL using pure Python TDS (Tabular Data Stream) protocol implementation',\n author='Mikhail Denisenko',"},"message":{"kind":"string","value":"changed name of package to be able to upload to pypi"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278095,"cells":{"diff":{"kind":"string","value":"diff --git a/setup.py b/setup.py\nindex .. 100644\n--- a/setup.py\n+++ b/setup.py\n@@ -51,7 +51,7 @@ data_files=[\n ('cherrypy/scaffold', ['cherrypy/scaffold/example.conf',\n 'cherrypy/scaffold/site.conf',\n ]),\n- ('cherrypy/scaffold/static', ['made_with_cherrypy_small.png',\n+ ('cherrypy/scaffold/static', ['cherrypy/scaffold/static/made_with_cherrypy_small.png',\n ]),\n ('cherrypy/test', ['cherrypy/test/style.css',\n 'cherrypy/test/test.pem',"},"message":{"kind":"string","value":"Oops. Buglet in setup.py."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278096,"cells":{"diff":{"kind":"string","value":"diff --git a/c7n/policy.py b/c7n/policy.py\nindex .. 100644\n--- a/c7n/policy.py\n+++ b/c7n/policy.py\n@@ -425,6 +425,13 @@ class LambdaMode(PolicyExecutionMode):\n TODO: better customization around execution context outputs\n TODO: support centralized lambda exec across accounts.\n \"\"\"\n+\n+ mode = self.policy.data.get('mode', {})\n+ if not bool(mode.get(\"log\", True)):\n+ root = logging.getLogger()\n+ map(root.removeHandler, root.handlers[:])\n+ root.handlers = [logging.NullHandler()]\n+\n resources = self.resolve_resources(event)\n if not resources:\n return resources"},"message":{"kind":"string","value":"policy lambda - allow removing lambda logging handler (#)"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278097,"cells":{"diff":{"kind":"string","value":"diff --git a/nailgun/entities.py b/nailgun/entities.py\nindex .. 
100644\n--- a/nailgun/entities.py\n+++ b/nailgun/entities.py\n@@ -4982,7 +4982,7 @@ class CompliancePolicies(Entity, EntityReadMixin):\n unique=True\n ),\n 'organization': entity_fields.OneToManyField(Organization),\n- 'hosts': entity_fields.ListField()\n+ 'hosts': entity_fields.OneToManyField(Host)\n }\n self._meta = {\n 'api_path': 'api/v2/compliance/policies',"},"message":{"kind":"string","value":"Changed ListField() to OneToManyField(Host)"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278098,"cells":{"diff":{"kind":"string","value":"diff --git a/src/python/grpcio/commands.py b/src/python/grpcio/commands.py\nindex .. 100644\n--- a/src/python/grpcio/commands.py\n+++ b/src/python/grpcio/commands.py\n@@ -286,10 +286,11 @@ class TestLite(setuptools.Command):\n runner = tests.Runner()\n result = runner.run(loader.suite)\n if not result.wasSuccessful():\n- sys.exit(1)\n+ sys.exit('Test failure')\n \n def _add_eggs_to_path(self):\n \"\"\"Adds all egg files under .eggs to sys.path\"\"\"\n+ # TODO(jtattemusch): there has to be a cleaner way to do this\n import pkg_resources\n eggs_dir = os.path.join(PYTHON_STEM, '../../../.eggs')\n eggs = [os.path.join(eggs_dir, filename)"},"message":{"kind":"string","value":"Address comments and add a TODO."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":278099,"cells":{"diff":{"kind":"string","value":"diff --git a/testproject/testapp/tests/common.py b/testproject/testapp/tests/common.py\nindex .. 
100644\n--- a/testproject/testapp/tests/common.py\n+++ b/testproject/testapp/tests/common.py\n@@ -6,6 +6,8 @@ try:\n except ImportError:\n import mock\n \n+__all__ = ['get_user_model', 'IntegrityError', 'mock']\n+\n \n def create_user(**kwargs):\n data = {"},"message":{"kind":"string","value":"Fix tests common F with __all__"},"diff_languages":{"kind":"string","value":"py"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":2780,"numItemsPerPage":100,"numTotalItems":278877,"offset":278000,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjU3MTQ1OSwic3ViIjoiL2RhdGFzZXRzL2hrczM1MGQvZ2l0LWRpZmYtdG8tY29tbWl0LWdlbW1hLTMtMjcwbSIsImV4cCI6MTc1NjU3NTA1OSwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.jafy8EUMmyqfat-zgDQ_cqZAOQbR4A1OZcBvUbg_7qjYUiRVAtUmnWByTEb9MDAVuYG4lN37nr1J5LoqCzxSAQ","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/shipane_sdk/base_manager.py b/shipane_sdk/base_manager.py index <HASH>..<HASH> 100644 --- a/shipane_sdk/base_manager.py +++ b/shipane_sdk/base_manager.py @@ -144,7 +144,7 @@ class StrategyTrader(object): self._shipane_client = Client(self._logger, **config['client']) self._order_id_map = {} self._expire_before = datetime.datetime.combine(datetime.date.today(), datetime.time.min) - self._last_sync_portfolio_fingerprint = {} + self._last_sync_portfolio_fingerprint = None @property def id(self):
Fix sync not performed issue when portfolio becomes empty
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ install_requires = [ setup( name='arcgis-rest-query', - version='0.0.8', + version='0.0.9', description='A tool to download a layer from an ArcGIS web service as GeoJSON', author='Ken Schwencke', author_email='[email protected]',
Bumped up version so we can stop installing wsgiref
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ setup( "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Software Development :: Libraries" ], - install_requires=["click", "jinja2", "six"], + install_requires=["click", "jinja2", "backports.csv"], entry_points={ "console_scripts": [ "csvtotable = csvtotable.cli:cli",
Add missing dependancy to setup.py
py
diff --git a/peri/opt/addsubtract.py b/peri/opt/addsubtract.py index <HASH>..<HASH> 100644 --- a/peri/opt/addsubtract.py +++ b/peri/opt/addsubtract.py @@ -663,7 +663,7 @@ def add_subtract_misfeatured_tile(st, tile, rad='calc', max_iter=3, if len(ainds) > 0: _ = st.obj_remove_particle(ainds) if rinds.size > 0: - for p, r in zip(rpos, rrad): + for p, r in zip(rpos.reshape(-1,3), rrad.reshape(-1)): _ = st.obj_add_particle(p, r) n_added = 0; ainds = [] return n_added, ainds
add_subtract_misfeatured_tile: reshaping rpos, rrad for zipping.
py
diff --git a/visidata/cmdlog.py b/visidata/cmdlog.py index <HASH>..<HASH> 100644 --- a/visidata/cmdlog.py +++ b/visidata/cmdlog.py @@ -302,7 +302,7 @@ def replayOne(vd, r): if vs: vd.push(vs) else: - vs = vd.activeSheet + vs = vd.activeSheet or vd.cmdlog vd.moveToReplayContext(r, vs) @@ -345,7 +345,8 @@ def replay_sync(vd, cmdlog, live=False): cmdlog.cursorRowIndex += 1 prog.addProgress(1) - vd.activeSheet.ensureLoaded() + if vd.activeSheet: + vd.activeSheet.ensureLoaded() vd.sync() while not vd.delay(): pass
[replay] init vd.cmdlog as starter sheet; only load activeSheet, if exists
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -16,4 +16,17 @@ setup(name='django-mailviews', test_suite='mailviews.tests.__main__.__main__', zip_safe=False, license='Apache License 2.0', + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'Framework :: Django', + 'Intended Audience :: Developers', + 'License :: OSI Approved :: Apache License 2.0', + 'Operating System :: OS Independent', + 'Topic :: Software Development', + 'Programming Language :: Python :: 2', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.4', + ] )
Add classifiers This makes it clear what Python versions are supported.
py
diff --git a/PyCIM/Test/CIM15Test.py b/PyCIM/Test/CIM15Test.py index <HASH>..<HASH> 100644 --- a/PyCIM/Test/CIM15Test.py +++ b/PyCIM/Test/CIM15Test.py @@ -20,6 +20,8 @@ import unittest +import pytest + from CIM15.IEC61970.Core import \ ConnectivityNode, Terminal @@ -228,7 +230,7 @@ class ACLineSegmentTests(unittest.TestCase): def test_more_than_one_impedance_returns_error(self): per_length_sequence_impedance = PerLengthSequenceImpedance() per_length_phase_impedance = PerLengthPhaseImpedance() - with self.assertRaises(ValueError): + with pytest.raises(ValueError): ac_line_segment = ACLineSegment( PhaseImpedance=per_length_phase_impedance, SequenceImpedance=per_length_sequence_impedance)
Use pytest.raises instead of self.assertRaises. The hope is that pytest's "raises" context manager will work in python <I>, which doesn't ship with unittest's assertRaises as a context manager.
py
diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py index <HASH>..<HASH> 100644 --- a/setuptools/tests/test_easy_install.py +++ b/setuptools/tests/test_easy_install.py @@ -10,6 +10,7 @@ import contextlib import textwrap import tarfile import logging +import itertools import pytest @@ -82,7 +83,8 @@ class TestEasyInstallTest(unittest.TestCase): old_platform = sys.platform try: - name, script = [i for i in next(get_script_args(dist))][0:2] + args = next(get_script_args(dist)) + name, script = itertools.islice(args, 2) finally: sys.platform = old_platform
Slice the iterable rather than converting to a list and slicing that.
py
diff --git a/test/__init__.py b/test/__init__.py index <HASH>..<HASH> 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -153,7 +153,7 @@ class ClientContext(object): # May not have this if OperationFailure was raised earlier. self.cmd_line = self.client.admin.command('getCmdLineOpts') - self.test_commands_enabled = ('testCommandsEnabled=1' + self.test_commands_enabled = ('enableTestCommands=1' in self.cmd_line['argv']) self.is_mongos = (self.ismaster.get('msg') == 'isdbgrid') self.has_ipv6 = self._server_started_with_ipv6()
Fix require_test_commands decorator for unittests.
py
diff --git a/astromodels/functions/function.py b/astromodels/functions/function.py index <HASH>..<HASH> 100644 --- a/astromodels/functions/function.py +++ b/astromodels/functions/function.py @@ -1350,6 +1350,7 @@ class Function3D(Function): return np.squeeze(result).to(self.w_unit) + @memoize def _call_with_units(self, x, y, z, *args, **kwargs): # Gather the current parameters' values with units @@ -1370,7 +1371,7 @@ class Function3D(Function): else: return results - + @memoize def _call_without_units(self, x, y, z, *args, **kwargs): # Gather the current parameters' values without units, which means that the whole computation
Added memoization to 3d functions
py
diff --git a/nanoget/nanoget.py b/nanoget/nanoget.py index <HASH>..<HASH> 100644 --- a/nanoget/nanoget.py +++ b/nanoget/nanoget.py @@ -279,8 +279,11 @@ def get_pID(read): try: return 100 * (1 - read.get_tag("NM") / read.query_alignment_length) except KeyError: - return 100 * (1 - (parse_MD(read.get_tag("MD")) + parse_CIGAR(read.cigartuples)) / - read.query_alignment_length) + try: + return 100 * (1 - (parse_MD(read.get_tag("MD")) + parse_CIGAR(read.cigartuples)) / + read.query_alignment_length) + except KeyError: + return None except ZeroDivisionError: return None
return None if no MD tag is found
py
diff --git a/shinken/satellitelink.py b/shinken/satellitelink.py index <HASH>..<HASH> 100644 --- a/shinken/satellitelink.py +++ b/shinken/satellitelink.py @@ -45,7 +45,7 @@ class SatelliteLink(Item): properties = Item.properties.copy() properties.update({ - 'address': StringProp(fill_brok=['full_status']), + 'address': StringProp(default='localhost', fill_brok=['full_status']), 'timeout': IntegerProp(default=3, fill_brok=['full_status']), 'data_timeout': IntegerProp(default=120, fill_brok=['full_status']), 'check_interval': IntegerProp(default=60, fill_brok=['full_status']),
Simplification: we can use localhost as the default address.
py
diff --git a/splinter/driver/__init__.py b/splinter/driver/__init__.py index <HASH>..<HASH> 100644 --- a/splinter/driver/__init__.py +++ b/splinter/driver/__init__.py @@ -58,6 +58,9 @@ class DriverAPI(object): def find_option_by_value(self, value): raise NotImplementedError + def find_option_by_text(self, text): + raise NotImplementedError + def wait_for_element(self, selector, timeout, interval): raise NotImplementedError
added method find by option text in driver interface class
py
diff --git a/doc/sphinxext/gen_rst.py b/doc/sphinxext/gen_rst.py index <HASH>..<HASH> 100644 --- a/doc/sphinxext/gen_rst.py +++ b/doc/sphinxext/gen_rst.py @@ -19,6 +19,7 @@ import cPickle import urllib2 import gzip import posixpath +import subprocess try: from PIL import Image @@ -716,7 +717,7 @@ def make_thumbnail(in_fname, out_fname, width, height): # software is installed if os.environ.get('SKLEARN_DOC_OPTIPNG', False): try: - os.system("optipng -quiet -o 9 '{0}'".format(out_fname)) + subprocess.call(["optipng", "-quiet", "-o", "9", out_fname]) except Exception: warnings.warn('Install optipng to reduce the size of the generated images')
MAINT use subprocess.call, not os.system The former is safer and faster.
py
diff --git a/bfg9000/builtins/packages.py b/bfg9000/builtins/packages.py index <HASH>..<HASH> 100644 --- a/bfg9000/builtins/packages.py +++ b/bfg9000/builtins/packages.py @@ -25,7 +25,7 @@ def _find_library(env, name, search_dirs): for d in search_dirs: for i in linkers: candidate = i.output_file(os.path.join(d, name)) - if os.path.exists(candidate.path.realize(None)): + if os.path.exists(candidate.link.path.realize(None)): return candidate raise ValueError("unable to find package '{}'".format(name))
Use the import lib when looking up libraries on Windows
py
diff --git a/python/ray/ray_logging.py b/python/ray/ray_logging.py index <HASH>..<HASH> 100644 --- a/python/ray/ray_logging.py +++ b/python/ray/ray_logging.py @@ -165,15 +165,17 @@ def get_worker_log_file_name(worker_type): "please report it to Ray's Github issue.") worker_name = "worker" else: - job_id = ray.JobID.nil() + job_id = "" worker_name = "io_worker" # Make sure these values are set already. assert ray.worker._global_node is not None assert ray.worker.global_worker is not None filename = (f"{worker_name}-" - f"{binary_to_hex(ray.worker.global_worker.worker_id)}-" - f"{job_id}-{os.getpid()}") + f"{binary_to_hex(ray.worker.global_worker.worker_id)}-") + if job_id: + filename += f"{job_id}-" + filename += f"{os.getpid()}" return filename
[Object Spilling] Remove job id from the io worker log name. (#<I>)
py
diff --git a/openquake/hazardlib/tests/geo/mesh_test.py b/openquake/hazardlib/tests/geo/mesh_test.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/tests/geo/mesh_test.py +++ b/openquake/hazardlib/tests/geo/mesh_test.py @@ -427,10 +427,10 @@ class MeshJoynerBooreDistanceTestCase(unittest.TestCase): mesh = RectangularMesh(lons, lats) dist = mesh.get_joyner_boore_distance( Mesh.from_points_list([Point(-121.76, 37.23)])) - # this is the distance with Ubuntu 12.04 - numpy.testing.assert_almost_equal(dist, [36.61260128]) - # this is the distance with Ubuntu 14.04 - # numpy.testing.assert_almost_equal(dist, [36.61389245]) + dist_ubuntu_12_04 = 36.61260128 + dist_ubuntu_14_04 = 36.61389245 + self.assertTrue(numpy.allclose(dist, dist_ubuntu_12_04) or + numpy.allclose(dist, dist_ubuntu_14_04)) class RectangularMeshGetMiddlePointTestCase(unittest.TestCase):
Fixed the version test on the JoynerBoore distance
py
diff --git a/bot/action/core/command/throttler/shortlyrepeatedcommand.py b/bot/action/core/command/throttler/shortlyrepeatedcommand.py index <HASH>..<HASH> 100644 --- a/bot/action/core/command/throttler/shortlyrepeatedcommand.py +++ b/bot/action/core/command/throttler/shortlyrepeatedcommand.py @@ -12,12 +12,16 @@ LOG_TAG = FormattedText().bold("THROTTLER") class ShortlyRepeatedCommandThrottler(Throttler): def __init__(self, api: Api): self.api = api + self.command_key_factory = CommandKeyFactory() self.recent_commands = {} + def add_personal_command(self, command: str): + self.command_key_factory.add_personal_command(command) + def should_execute(self, event): current_date = event.message.date self.__cleanup_recent_commands(current_date) - command_key = NonPersonalCommandKey(event) + command_key = self.command_key_factory.get_command_key(event) if command_key not in self.recent_commands: throttling_state = CommandThrottlingState(event) if not throttling_state.has_expired(current_date):
Use CommandKeyFactory in ShortlyRepeatedCommandThrottler
py
diff --git a/topydo/ui/Main.py b/topydo/ui/Main.py index <HASH>..<HASH> 100644 --- a/topydo/ui/Main.py +++ b/topydo/ui/Main.py @@ -14,6 +14,7 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. +import shlex import urwid from six import u @@ -122,7 +123,8 @@ class UIApplication(CLIApplicationBase): Executes a command, given as a string. """ p_output = p_output or self._output - (subcommand, args) = get_subcommand(p_command.split()) + p_command = shlex.split(p_command) + (subcommand, args) = get_subcommand(p_command) try: command = subcommand(
Split a command's arguments with shlex. This fixes the issue that `ls -F "%p %s"` doesn't work, because the double quotes weren't interpreted properly. Reported by @mruwek.
py
diff --git a/thinc/loss.py b/thinc/loss.py index <HASH>..<HASH> 100644 --- a/thinc/loss.py +++ b/thinc/loss.py @@ -1,8 +1,15 @@ import numpy +try: + from cupy import get_array_module +except ImportError: + def get_array_module(*a, **k): + return numpy + def categorical_crossentropy(scores, labels): - target = numpy.zeros(scores.shape, dtype='float32') + xp = get_array_module(scores) + target = xp.zeros(scores.shape, dtype='float32') loss = 0. for i in range(len(labels)): target[i, int(labels[i])] = 1.
Use one-hot representation in categorical cross-entropy
py
diff --git a/pybotvac/account.py b/pybotvac/account.py index <HASH>..<HASH> 100644 --- a/pybotvac/account.py +++ b/pybotvac/account.py @@ -127,11 +127,9 @@ class Account: secret=robot['secret_key'], traits=robot['traits'], endpoint=robot['nucleo_url'])) - except requests.exceptions.HTTPError: + except NeatoRobotException: print ("Your '{}' robot is offline.".format(robot['name'])) continue - except requests.exceptions.ConnectionError: - raise NeatoRobotException("Unable to add robot") self.refresh_persistent_maps() for robot in self._robots:
Fix robot offline (#<I>)
py
diff --git a/src/gnupg.py b/src/gnupg.py index <HASH>..<HASH> 100644 --- a/src/gnupg.py +++ b/src/gnupg.py @@ -90,11 +90,9 @@ import codecs ## See https://code.patternsinthevoid.net/?p=android-locale-hack.git import encodings import locale -import logging import os import re import sys -import tempfile import threading from _parsers import _fix_unsafe, _sanitise, _is_allowed, _sanitise_list
Remove unused logging and tempfile imports from gnupg.py.
py
diff --git a/tests/tests.py b/tests/tests.py index <HASH>..<HASH> 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -31,6 +31,11 @@ def test(func): def output_of_run_is_stored(shell): result = shell.run(["echo", "hello"]) assert_equal("hello\n", result.output) + +@test +def output_is_not_truncated_when_not_ending_in_a_newline(shell): + result = shell.run(["echo", "-n", "hello"]) + assert_equal("hello", result.output) @test def cwd_of_run_can_be_set(shell):
Add test for output that doesn't end in a newline
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -73,7 +73,7 @@ pytest_runner = ['pytest-runner'] if 'ptr' in sys.argv else [] setup_params = dict( name = 'keyring', - version = "2.1", + version = "2.2", description = "Store and access your passwords safely.", url = "http://bitbucket.org/kang/python-keyring-lib", keywords = "keyring Keychain GnomeKeyring Kwallet password storage",
Bumped to <I> in preparation for next release.
py
diff --git a/thumbor/__init__.py b/thumbor/__init__.py index <HASH>..<HASH> 100644 --- a/thumbor/__init__.py +++ b/thumbor/__init__.py @@ -10,4 +10,4 @@ '''This is the main module in thumbor''' -__version__ = "5.0.0-rc1" +__version__ = "5.0.0rc1"
renaming to <I>rc1
py
diff --git a/elastic/parcalc.py b/elastic/parcalc.py index <HASH>..<HASH> 100644 --- a/elastic/parcalc.py +++ b/elastic/parcalc.py @@ -177,12 +177,12 @@ def ParCalculate(systems,calc,cleanup=True,prefix="Calc_"): print len(sys), "Workers started" # Collect the results - res={} + res=[] while len(res)<len(sys) : s=oq.get() - res[s]=s + res.append(s) #print "Got from oq:", s.get_volume(), s.get_isotropic_pressure(s.get_stress()) - return [res[s] for s in sys] + return [r for s in sys for r in res if r==s] # Testing routines using VASP as a calculator in the cluster environment. # TODO: Make it calculator/environment agnostic
Atoms objects are not hashable. Re-implement ordering results using ==.
py
diff --git a/bin/import_swb.py b/bin/import_swb.py index <HASH>..<HASH> 100755 --- a/bin/import_swb.py +++ b/bin/import_swb.py @@ -43,7 +43,7 @@ def maybe_download(archive_url, target_dir, ldc_dataset): ldc_path = archive_url + ldc_dataset if not os.path.exists(target_dir): print('No path "%s" - creating ...' % target_dir) - makedirs(target_dir) + os.makedirs(target_dir) if not os.path.exists(archive_path): print('No archive "%s" - downloading...' % archive_path)
Added `os` import in front of `makedirs`
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,12 +2,12 @@ from distutils.core import setup setup( name = 'apiary2postman', packages = ['apiary2postman'], # this must be the same as the name above - version = '0.4.9', + version = '0.5.0', description = 'A tool for converting Blueman API markup from Apiary.io to Postman collection/dumps', author = 'Erik Jonsson Thoren', author_email = '[email protected]', url = 'https://github.com/thecopy/apiary2postman', # use the URL to the github repo - download_url = 'https://github.com/thecopy/apiary2postman/tarball/0.4.9', + download_url = 'https://github.com/thecopy/apiary2postman/tarball/0.5.0', keywords = ['apiary', 'blueman', 'postman'], # arbitrary keywords classifiers = [], entry_points={
setup.py for <I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,23 +2,14 @@ # http://docs.python.org/distutils/setupscript.html # http://docs.python.org/2/distutils/examples.html -import sys from setuptools import setup, find_packages -import ast +import re import os -name = 'endpoints' -version = '' +name = "endpoints" with open(os.path.join(name, "__init__.py"), 'rU') as f: - for node in (n for n in ast.parse(f.read()).body if isinstance(n, ast.Assign)): - node_name = node.targets[0] - if isinstance(node_name, ast.Name) and node_name.id.startswith('__version__'): - version = node.value.s - break - -if not version: - raise RuntimeError('Unable to find version number') + version = re.search("^__version__\s*=\s*[\'\"]([^\'\"]+)", f.read(), flags=re.I | re.M).group(1) setup(
a little simpler setup.py
py
diff --git a/lavalink/websocket.py b/lavalink/websocket.py index <HASH>..<HASH> 100644 --- a/lavalink/websocket.py +++ b/lavalink/websocket.py @@ -41,9 +41,9 @@ class WebSocket: } try: - self._ws = await self.session.ws_connect('ws://{}:{}'.format(self._host, self._port), - heartbeat=5.0, - headers=headers) + self._ws = await self._session.ws_connect('ws://{}:{}'.format(self._host, self._port), + heartbeat=5.0, + headers=headers) except aiohttp.ClientConnectorError: log.warn('Failed to connect to node `{}`, retrying in 5s...'.format(self._node.name)) await asyncio.sleep(5.0)
My disappointment is immeasureable, and my day is ruined
py
diff --git a/armstrong/dev/tasks/__init__.py b/armstrong/dev/tasks/__init__.py index <HASH>..<HASH> 100644 --- a/armstrong/dev/tasks/__init__.py +++ b/armstrong/dev/tasks/__init__.py @@ -41,8 +41,9 @@ def pip_install(func): def inner(*args, **kwargs): if getattr(fabfile, "pip_install_first", True): with settings(warn_only=True): - local("pip uninstall -y %s" % get_full_name(), capture=False) - local("pip install .", capture=False) + if not os.environ.get("SKIP_INSTALL", True): + local("pip uninstall -y %s" % get_full_name(), capture=False) + local("pip install .", capture=False) func(*args, **kwargs) return inner
Add ENV for turning off re-install
py
diff --git a/zencoder/core.py b/zencoder/core.py index <HASH>..<HASH> 100644 --- a/zencoder/core.py +++ b/zencoder/core.py @@ -2,6 +2,9 @@ import os import httplib2 from urllib import urlencode +# Library version. Should probably be rewritten to match the version in setup.py +lib_version = 0.5; + # Note: I've seen this pattern for dealing with json in different versions of # python in a lot of modules -- if there's a better way, I'd love to use it. try: @@ -59,11 +62,13 @@ class HTTPBackend(object): headers. """ if self.as_xml: - return {'Content-Type': 'application/xml', - 'Accepts': 'application/xml'} - else: - return {'Content-Type': 'application/json', - 'Accepts': 'application/json'} + content_type = 'xml' + else : + content_type = 'json' + + return {'Content-Type': 'application/' + content_type, + 'Accepts': 'application/' + content_type, + 'User-Agent': 'Zencoder-Py v' + str(lib_version)} def encode(self, data): """
Added User-Agent to headers
py
diff --git a/billy/bin/update.py b/billy/bin/update.py index <HASH>..<HASH> 100755 --- a/billy/bin/update.py +++ b/billy/bin/update.py @@ -192,7 +192,7 @@ def _do_reports(abbrev, args): db.reports.save(report, safe=True) -def main(old_scrape_compat=False): +def main(): try: parser = argparse.ArgumentParser( description='update billy data', @@ -285,10 +285,7 @@ def main(old_scrape_compat=False): args.chambers = ['upper', 'lower'] if not args.actions: - if old_scrape_compat: - args.actions = ['scrape'] - else: - args.actions = ['scrape', 'import', 'report'] + args.actions = ['scrape', 'import', 'report'] if not args.types: args.types = ['bills', 'legislators', 'votes', 'committees', @@ -409,8 +406,5 @@ def main(old_scrape_compat=False): sys.exit(1) -def scrape_compat_main(): - main(True) - if __name__ == '__main__': main()
drop old billy-scrape
py
diff --git a/MAVProxy/modules/mavproxy_console.py b/MAVProxy/modules/mavproxy_console.py index <HASH>..<HASH> 100644 --- a/MAVProxy/modules/mavproxy_console.py +++ b/MAVProxy/modules/mavproxy_console.py @@ -88,7 +88,8 @@ class ConsoleModule(mp_module.MPModule): self.menu = MPMenuTop([]) self.add_menu(MPMenuSubMenu('MAVProxy', items=[MPMenuItem('Settings', 'Settings', 'menuSettings'), - MPMenuItem('Map', 'Load Map', '# module load map')])) + MPMenuItem('Show Map', 'Load Map', '# module load map'), + MPMenuItem('Show HUD', 'Load HUD', '# module load horizon')])) self.vehicle_menu = MPMenuSubMenu('Vehicle', items=[]) self.add_menu(self.vehicle_menu)
console: added "Show HUD" to console menu
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -124,8 +124,8 @@ def get_version_info(): vinfo = _version_helper.generate_git_version_info() except: vinfo = vdummy() - vinfo.version = '1.16.dev12' - vinfo.release = 'False' + vinfo.version = '1.16.12' + vinfo.release = 'True' with open('pycbc/version.py', 'w') as f: f.write("# coding: utf-8\n")
Prepare for release (#<I>)
py
diff --git a/mortgagekit/calculator.py b/mortgagekit/calculator.py index <HASH>..<HASH> 100644 --- a/mortgagekit/calculator.py +++ b/mortgagekit/calculator.py @@ -84,7 +84,7 @@ class MortgageCalculator(object): # Calculate our loan princinple. loanAmount = loanPurchaseAmount - downPayment - amountFinancedPercent = loanAmount / loanPurchaseAmount + amountFinancedPercent = loanAmount.amount / loanPurchaseAmount.amount return amountFinancedPercent * 100 def interest_rate_per_payment_frequency(self):
BUGFIX: Fixed the Money / Money exception in calculator.py file.
py
diff --git a/pyforms/gui/Controls/ControlCombo.py b/pyforms/gui/Controls/ControlCombo.py index <HASH>..<HASH> 100755 --- a/pyforms/gui/Controls/ControlCombo.py +++ b/pyforms/gui/Controls/ControlCombo.py @@ -38,17 +38,18 @@ class ControlCombo(ControlBase): self._addingItem = False def _currentIndexChanged(self, index): - self.currentIndexChanged(index) + if not self._addingItem: + item = self._form.comboBox.currentText() + if len(item) >= 1: + ControlBase.value.fset(self, self._items[str(item)]) + self.currentIndexChanged(index) def currentIndexChanged(self, index): """Called when the user chooses an item in the combobox and the selected choice is different from the last one selected. @index: item's index """ - if not self._addingItem: - item = self._form.comboBox.currentText() - if len(item) >= 1: - ControlBase.value.fset(self, self._items[str(item)]) + pass def _activated(self, index): self.activated(index) @@ -98,7 +99,7 @@ class ControlCombo(ControlBase): def currentIndex(self): return self._form.comboBox.currentIndex() - + def count(self): return self._form.comboBox.count()
currentIndexChanged can now be extended by other classes
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -11,9 +11,9 @@ Another Python package with deals with interval arithmetic, this one hopes to be classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers keywords='interval range discontinous-range union intersection', author='Inti Ocean', - author_email='', + author_email='[email protected]', url='intiocean.com', - license='', + license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, zip_safe=True,
Put license in setup.py and email
py
diff --git a/holoviews/core/dimension.py b/holoviews/core/dimension.py index <HASH>..<HASH> 100644 --- a/holoviews/core/dimension.py +++ b/holoviews/core/dimension.py @@ -134,6 +134,7 @@ class Dimensioned(param.Parameterized): 'deep_dimensions'] def __init__(self, **params): + self.data = None for group in self._dim_groups[0:2]: if group in params: dimensions = [Dimension(d) if not isinstance(d, Dimension) else d
Declared data attribute on Dimensioned for use by all subclasses
py
diff --git a/source/rafcon/statemachine/states/container_state.py b/source/rafcon/statemachine/states/container_state.py index <HASH>..<HASH> 100644 --- a/source/rafcon/statemachine/states/container_state.py +++ b/source/rafcon/statemachine/states/container_state.py @@ -1769,6 +1769,10 @@ class ContainerState(State): from_state_id = transition.from_state from_outcome_id = transition.from_outcome + if from_state_id == self.state_id: + return False, "from_state_id of transition must not be the container state itself." \ + " In the case of a start transition both the from state and the from_outcome are None." + if from_state_id != self.state_id and from_state_id not in self.states: return False, "from_state not existing"
fix transition checks omit transitions from an outcome of a container state to the same outcome
py
diff --git a/holoviews/core/tree.py b/holoviews/core/tree.py index <HASH>..<HASH> 100644 --- a/holoviews/core/tree.py +++ b/holoviews/core/tree.py @@ -202,6 +202,10 @@ class AttrTree(object): raise AttributeError("%s: Custom paths elements must be capitalized." % identifier) + def _node_repr(self, node): + return '--+' if node.identifier is None else node.identifier + + def _draw_tree(self, node, prefix='', identifier=''): """ Recursive function that builds up an ASCII tree given an @@ -209,7 +213,7 @@ class AttrTree(object): """ children = node.children if isinstance(node, AttrTree) else [] if isinstance(node, AttrTree): - identifier = '--+' if node.identifier is None else node.identifier + identifier = self._node_repr(node) else: identifier = identifier + ' : ' + str(type(node).__name__)
Refactored AttrTree repr to support custom node repr
py
diff --git a/sos/report/plugins/subscription_manager.py b/sos/report/plugins/subscription_manager.py index <HASH>..<HASH> 100644 --- a/sos/report/plugins/subscription_manager.py +++ b/sos/report/plugins/subscription_manager.py @@ -45,8 +45,8 @@ class SubscriptionManager(Plugin, RedHatPlugin): self.add_cmd_output(["rct cat-cert %s" % cert for cert in certs]) def postproc(self): - passwdreg = r"(proxy_password(\s)*=(\s)*)(.*)" - repl = r"\1 ********" + passwdreg = r"(proxy_password(\s)*=(\s)*)(\S+)\n" + repl = r"\1********\n" self.do_path_regex_sub("/etc/rhsm/rhsm.conf", passwdreg, repl) # vim: et ts=4 sw=4
[subscription_manager] scrub passwords on one line only Fix regexp to scrub proxy_password on one line and don't obfuscate subsequent line when the password is empty. Closes: #<I> Resolves: #<I>
py
diff --git a/km.py b/km.py index <HASH>..<HASH> 100755 --- a/km.py +++ b/km.py @@ -78,9 +78,18 @@ class KM: @classmethod def request(cls,type,data,update=True): query = [] - data.update({'_k': cls._key, '_t': cls.now().strftime('%s') }) + + # if user has defined their own _t, then include necessary _d + if '_t' in data: + data['_d'] = 1 + else: + data['_t'] = cls.now().strftime('%s') + + # add customer key to data sent + data['_k'] = cls._key + if update: - data.update({'_p': cls._id}) + data['_p'] = cls._id for key,val in data.items(): query.append( urllib.quote(str(key)) + '=' + urllib.quote(str(val)) )
adding ability for user to override timestamp in python library
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,10 +6,7 @@ import os from pip.req import parse_requirements from pip.download import PipSession -try: - from setuptools import setup -except ImportError: - from distutils.core import setup +from setuptools import setup, find_packages # IPython 6.0+ does not support Python 2.6, 2.7, 3.0, 3.1, or 3.2 if sys.version_info < (3,3): @@ -29,9 +26,7 @@ setup( version='0.19', author='Rick Gerkin', author_email='[email protected]', - packages=['sciunit', - 'sciunit.scores', - 'sciunit.unit_test'], + packages=find_packages(), url='http://sciunit.scidash.org', license='MIT', description='A test-driven framework for formally validating scientific models against data.',
Use find_packages() to determine packages to install
py
diff --git a/sos/plugins/networking.py b/sos/plugins/networking.py index <HASH>..<HASH> 100644 --- a/sos/plugins/networking.py +++ b/sos/plugins/networking.py @@ -108,7 +108,8 @@ class Networking(Plugin): "ip neigh show", "nmcli general status", "nmcli connection show", - "nmcli device status" + "nmcli device status", + "biosdevname -d" ]) ip_link_result=self.call_ext_prog("ip -o link") if ip_link_result['status'] == 0:
[networking] add biosdevname output Suggested by Bryan Quigley. Fixes #<I>.
py
diff --git a/dedupe/api.py b/dedupe/api.py index <HASH>..<HASH> 100644 --- a/dedupe/api.py +++ b/dedupe/api.py @@ -1032,6 +1032,9 @@ class ActiveMatching(Matching): Class for training a matcher. """ + active_learner: labeler.DisagreementLearner | None + training_pairs: TrainingData + def __init__( self, variable_definition: Collection[VariableDefinition], @@ -1066,10 +1069,7 @@ class ActiveMatching(Matching): super().__init__(num_cores, in_memory, **kwargs) self.data_model = datamodel.DataModel(variable_definition) - - self.training_pairs: TrainingData self.training_pairs = {"distinct": [], "match": []} - self.active_learner: labeler.DisagreementLearner | None self.classifier = sklearn.model_selection.GridSearchCV( estimator=sklearn.linear_model.LogisticRegression(), param_grid={"C": [0.00001, 0.0001, 0.001, 0.01, 0.1, 1, 10]},
Move type hints to class from __init__ I don't think they do anything when they're in the __init__? At least VSCode's syntax highlighting doesn't work.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,18 +1,18 @@ -from distutils.core import setup +from setuptools import setup with open('requirements.txt') as f: required = f.read().splitlines() setup( - name='WebUI', - version='0.2.0', - author='Nick Johnstone', - author_email='[email protected]', - packages=['webui'], - scripts=['examples/test_app.py'], - url='https://github.com/Widdershin/WebUI/', - license='MIT', - description='WebUI lets you create first class desktop applications in Python with HTML/CSS', - long_description=open('README.rst').read(), - install_requires=required, - ) \ No newline at end of file + name='WebUI', + version='0.2.0', + author='Nick Johnstone', + author_email='[email protected]', + packages=['webui'], + scripts=['examples/test_app.py'], + url='https://github.com/Widdershin/WebUI/', + license='MIT', + description='WebUI lets you create first class desktop applications in Python with HTML/CSS', + long_description=open('README.rst').read(), + install_requires=required, +)
updated import in setup.py from distutils.core to setuptools
py
diff --git a/api/symboltable.py b/api/symboltable.py index <HASH>..<HASH> 100644 --- a/api/symboltable.py +++ b/api/symboltable.py @@ -428,6 +428,8 @@ class SymbolTable(object): entry.type_ = type_ entry.scope = SCOPE.global_ if self.current_scope == self.global_scope else SCOPE.local + entry.callable = False + entry.class_ = CLASS.var # Make it a variable if entry.type_ != type_: if not implicit and entry.type_ is not None:
Adds class and callable settings for variable declaration.
py
diff --git a/topydo/lib/TodoListBase.py b/topydo/lib/TodoListBase.py index <HASH>..<HASH> 100644 --- a/topydo/lib/TodoListBase.py +++ b/topydo/lib/TodoListBase.py @@ -173,6 +173,12 @@ class TodoListBase(object): self._todos = [] self.dirty = True + def replace(self, p_todos): + """ Replaces whole todolist with todo objects supplied as p_todos. """ + self.erase() + self.add_todos(p_todos) + self.dirty = True + def count(self): """ Returns the number of todos on this list. """ return len(self._todos)
Add "replace" method for TodoList objects This will be used to recreate TodoList from a list of Todo objects.
py
diff --git a/pyani/pyani_orm.py b/pyani/pyani_orm.py index <HASH>..<HASH> 100644 --- a/pyani/pyani_orm.py +++ b/pyani/pyani_orm.py @@ -42,8 +42,6 @@ This SQLAlchemy-based ORM replaces the previous SQL-based module """ -import os - from collections import namedtuple import numpy as np @@ -500,7 +498,7 @@ def add_run_genomes(session, run, indir, classpath=None, labelpath=None): indesc = read_fasta_description(fastafile) except Exception: raise PyaniORMException("Could not read genome files for database import") - abspath = os.path.abspath(fastafile) + abspath = fastafile.resolve() genome_len = get_genome_length(abspath) # If the genome is not already in the database, add it as a Genome object
convert pyani/pyani_orm.py from os.path to Pathlib
py
diff --git a/incuna_auth/middleware/login_required.py b/incuna_auth/middleware/login_required.py index <HASH>..<HASH> 100644 --- a/incuna_auth/middleware/login_required.py +++ b/incuna_auth/middleware/login_required.py @@ -9,6 +9,7 @@ from django.utils.translation import ugettext_lazy as _ EXEMPT_URLS = [re.compile('^%s$' % settings.LOGIN_URL.lstrip('/')), re.compile('^%s$' % settings.LOGOUT_URL.lstrip('/'))] EXEMPT_URLS += [re.compile(expr) for expr in getattr(settings, 'LOGIN_EXEMPT_URLS', [])] PROTECTED_URLS = [re.compile(expr) for expr in getattr(settings, 'LOGIN_PROTECTED_URLS', [r'^'])] +SEND_MESSAGE = getattr(settings, 'LOGIN_REQUIRED_SEND_MESSAGE', True) class LoginRequiredMiddleware: @@ -51,5 +52,6 @@ class LoginRequiredMiddleware: return # Add a message, and redirect to login. - messages.info(request, _('You must be logged in to view this page.')) + if SEND_MESSAGE: + messages.info(request, _('You must be logged in to view this page.')) return HttpResponseRedirect(settings.LOGIN_URL + '?next=' + request.path_info)
Allow the login required message to be disabled.
py
diff --git a/rest_framework_json_api/utils.py b/rest_framework_json_api/utils.py index <HASH>..<HASH> 100644 --- a/rest_framework_json_api/utils.py +++ b/rest_framework_json_api/utils.py @@ -2,6 +2,7 @@ Utils. """ import copy + import inflection from django.conf import settings from django.utils import six, encoding @@ -421,7 +422,7 @@ def extract_included(fields, resource, resource_instance, included_resources): continue try: - included_resources.remove(field_name) + included_resources.copy().remove(field_name) except ValueError: # Skip fields not in requested included resources continue
Copy included_resources before removing items from it Fixes #<I>
py
diff --git a/codekit/cli/github_tag_version.py b/codekit/cli/github_tag_version.py index <HASH>..<HASH> 100755 --- a/codekit/cli/github_tag_version.py +++ b/codekit/cli/github_tag_version.py @@ -389,19 +389,31 @@ def tag_gh_repos( continue except github.RateLimitExceededException: raise - except (github.GithubException, GitTagExistsError) as e: - yikes = pygithub.CaughtRepositoryError(repo['repo'], e) - + except GitTagExistsError as e: + # if force_tag is set, and the tag already exists, set + # update_tag and fall through. Otherwise, treat it as any other + # exception. if force_tag: update_tag = True elif fail_fast: + raise + else: + problems.append(e) + error(e) + continue + except github.GithubException as e: + yikes = pygithub.CaughtRepositoryError(repo['repo'], e) + + if fail_fast: raise yikes from None else: problems.append(yikes) + error(yikes) continue # tags are created/updated past this point if dry_run: + debug(' (noop)') continue try:
pedantically handle GitTagExistsError exceptions As they should be handled differently than Caught*Errors.
py
diff --git a/win32_event_log/datadog_checks/win32_event_log/check.py b/win32_event_log/datadog_checks/win32_event_log/check.py index <HASH>..<HASH> 100644 --- a/win32_event_log/datadog_checks/win32_event_log/check.py +++ b/win32_event_log/datadog_checks/win32_event_log/check.py @@ -288,6 +288,12 @@ class Win32EventLogCheck(AgentCheck): while True: # https://docs.microsoft.com/en-us/windows/win32/api/winevt/nf-winevt-evtnext # http://timgolden.me.uk/pywin32-docs/win32evtlog__EvtNext_meth.html + # + # An error saying EvtNext: The operation identifier is not valid happens + # when you call the method and there are no events to read (i.e. polling). + # There is an unreleased upstream contribution to return + # an empty tuple instead https://github.com/mhammond/pywin32/pull/1648 + # For the moment is logged as a debug line. try: events = win32evtlog.EvtNext(self._subscription, self._payload_size) except pywintypes.error as e:
Clarifiying comment in code (#<I>)
py
diff --git a/dvc/version.py b/dvc/version.py index <HASH>..<HASH> 100644 --- a/dvc/version.py +++ b/dvc/version.py @@ -7,7 +7,7 @@ import os import subprocess -_BASE_VERSION = "0.54.0" +_BASE_VERSION = "0.54.1" def _generate_version(base_version):
dvc: bump to <I>
py
diff --git a/plenum/cli/cli.py b/plenum/cli/cli.py index <HASH>..<HASH> 100644 --- a/plenum/cli/cli.py +++ b/plenum/cli/cli.py @@ -96,6 +96,7 @@ class Cli: name = 'plenum' properName = 'Plenum' fullName = 'Plenum protocol' + githubUrl = 'https://github.com/evernym/plenum' NodeClass = Node ClientClass = Client @@ -252,12 +253,16 @@ class Cli: self.print("Node registry loaded.") self.showNodeRegistry() else: - self.print("No information is found which can be used to connect to" - " the Sovrin nodes. This indicates an error. Check if " - "the file containing genesis transactions is present " - "in your base direcory which can be found in the config " - "as `baseDir`, if not then get this file from the github" - " repository and paste it in location `baseDir`") + msg = """ + No information is found which can be used to connect to the nodes. + This indicates an error. Check if the file containing genesis + transactions (which has name specified in config as + `poolTransactionsFile`) is present in your base directory + which can be found in the config as `baseDir`, if not then get + this file from the github repository under `/data` and paste it + in location `baseDir`. The github url repository is at {} + """.format(self.githubUrl) + self.print(msg) self.print("Type 'help' for more information.") self.print("Running {} {}\n".format(self.properName,
updating the message that appears when the code is missing the genesis file
py
diff --git a/f90nml/namelist.py b/f90nml/namelist.py index <HASH>..<HASH> 100644 --- a/f90nml/namelist.py +++ b/f90nml/namelist.py @@ -566,7 +566,7 @@ class Namelist(OrderedDict): # Strip metadata label for repeat groups if grp_name.startswith('_grp_'): - grp_name = grp_name.lstrip('_grp_').rsplit('_', 1)[0] + grp_name = grp_name[5:].rsplit('_', 1)[0] if self.uppercase: grp_name = grp_name.upper()
Bugfix: Replace 'lstrip' with slice Incorrectly used `lstrip` to remove a leading string, when I should have explicitly removed the substring with an indexed slice.
py
diff --git a/tests/unit/states/npm_test.py b/tests/unit/states/npm_test.py index <HASH>..<HASH> 100644 --- a/tests/unit/states/npm_test.py +++ b/tests/unit/states/npm_test.py @@ -181,6 +181,7 @@ class NpmTestCase(TestCase): comt = ('Package {0} is not in the cache'.format(non_cached_pkg)) pkg_ret.update({'name': non_cached_pkg, 'result': True, 'comment': comt}) self.assertDictEqual(npm.cache_cleaned(non_cached_pkg), pkg_ret) + pkg_ret.update({'name': name}) with patch.dict(npm.__opts__, {'test': True}): comt = ('Cached packages set to be removed')
Reset the name to valid package in npm cache_cleaned unit test
py
diff --git a/fermipy/utils.py b/fermipy/utils.py index <HASH>..<HASH> 100644 --- a/fermipy/utils.py +++ b/fermipy/utils.py @@ -443,13 +443,19 @@ def apply_minmax_selection(val, val_minmax): return (min_cut and max_cut) -def create_source_name(skydir): +def create_source_name(skydir, floor=True, prefix='PS'): hms = skydir.icrs.ra.hms dms = skydir.icrs.dec.dms - return 'PS J%02.f%04.1f%+03.f%02.f' % (hms.h, - hms.m + hms.s / 60., - dms.d, - np.abs(dms.m + dms.s / 60.)) + + if floor: + ra_ms = np.floor(10.*(hms.m + hms.s / 60.))/10. + dec_ms = np.floor(np.abs(dms.m + dms.s / 60.)) + else: + ra_ms = (hms.m + hms.s / 60.) + dec_ms = np.abs(dms.m + dms.s / 60.) + + return '%s J%02.f%04.1f%+03.f%02.f' % (prefix, hms.h, ra_ms, + dms.d, dec_ms) def create_model_name(src):
Fix source naming to adhere to IAU convention.
py
diff --git a/photons/lights.py b/photons/lights.py index <HASH>..<HASH> 100644 --- a/photons/lights.py +++ b/photons/lights.py @@ -333,12 +333,17 @@ class Apa102Driver: import mraa self.spiDev = mraa.Spi(0) self.spiDev.frequency(freqs) + self.brightness = 0xff + + def setGlobalBrightness(self, brightness): + if brightness >= 0 and brightness <= 255: + self.brightness = brightness def update(self, ledsData): data = bytearray() data[:4] = [0x00, 0x00, 0x00, 0x00] for rgb in ledsData: - data.append(0xff) + data.append(self.brightness) # apa102 is GBR because THINGS data.extend([rgb[1], rgb[2], rgb[0]])
add global brightness method to apa<I> driver
py
diff --git a/photutils/segmentation/properties.py b/photutils/segmentation/properties.py index <HASH>..<HASH> 100644 --- a/photutils/segmentation/properties.py +++ b/photutils/segmentation/properties.py @@ -1606,6 +1606,9 @@ class SourceProperties: return None # use circular aperture with radius=self.kron_params[2] xypos = (self.xcentroid.value, self.ycentroid.value) + values = (xypos[0], xypos[1], self.kron_params[2]) + if np.any(~np.isfinite(values)): + return None aperture = CircularAperture(xypos, r=self.kron_params[2]) else: radius = self.kron_radius.value * self.kron_params[1]
Fix Kron aperture for completely masked sources
py
diff --git a/h2o-py/tests/testdir_algos/deepwater/pyunit_custom_lenet_mnist_deepwater.py b/h2o-py/tests/testdir_algos/deepwater/pyunit_custom_lenet_mnist_deepwater.py index <HASH>..<HASH> 100644 --- a/h2o-py/tests/testdir_algos/deepwater/pyunit_custom_lenet_mnist_deepwater.py +++ b/h2o-py/tests/testdir_algos/deepwater/pyunit_custom_lenet_mnist_deepwater.py @@ -48,8 +48,8 @@ def deepwater_custom_lenet_mnist(): model = H2ODeepWaterEstimator(epochs=50, rate=1e-3, mini_batch_size=32, network='user', network_definition_file="/tmp/symbol_lenet-py.json", - #image_shape=[28,28], channels=1, - score_interval=0, train_samples_per_iteration=1000) + image_shape=[28,28], channels=1, + score_interval=0, train_samples_per_iteration=1000, gpu=False) model.train(x=[0],y=resp, training_frame=train, validation_frame=test) model.show()
Update image shape for custom lenet mnist example (not yet working).
py
diff --git a/deployutils/__init__.py b/deployutils/__init__.py index <HASH>..<HASH> 100644 --- a/deployutils/__init__.py +++ b/deployutils/__init__.py @@ -22,4 +22,4 @@ # OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF # ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -__version__ = '0.6.3-dev' +__version__ = '0.6.3'
releases <I> to pypi
py
diff --git a/tools/licensescheck.py b/tools/licensescheck.py index <HASH>..<HASH> 100644 --- a/tools/licensescheck.py +++ b/tools/licensescheck.py @@ -14,6 +14,9 @@ prunelist = ('hsqldb19b3', 'projectfile', 'xml') +# pro directories to skip +prolist = ('org/voltdb/management') + def verifyLicense(f, content, approvedLicensesJavaC, approvedLicensesPython): if f.endswith('.py'): if not content.startswith("#"): @@ -101,6 +104,11 @@ def processFile(f, approvedLicensesJavaC, approvedLicensesPython): return 0 def processAllFiles(d, approvedLicensesJavaC, approvedLicensesPython): + # Skip files in pro + for p in prolist: + if d.endswith(p): + return 0 + files = os.listdir(d) errcount = 0 for f in [f for f in files if not f.startswith('.') and f not in prunelist]:
Skip pro directories when doing license check.
py
diff --git a/ibis/expr/analysis.py b/ibis/expr/analysis.py index <HASH>..<HASH> 100644 --- a/ibis/expr/analysis.py +++ b/ibis/expr/analysis.py @@ -179,7 +179,11 @@ def substitute(fn, expr): except IbisTypeError: return expr else: - return new_node.to_expr() + # unfortunately we can't use `.to_expr()` here because it's not backend + # aware, and some backends have their own ir.Table subclasses, like + # impala. There's probably a design flaw in the modeling of + # backend-specific expressions. + return type(expr)(new_node) def substitute_parents(expr):
chore: use type(node) and explain why in a comment
py
diff --git a/myawis/__init__.py b/myawis/__init__.py index <HASH>..<HASH> 100644 --- a/myawis/__init__.py +++ b/myawis/__init__.py @@ -12,6 +12,11 @@ try: except ImportError: from urllib.parse import quote, urlencode +URLINFO_RESPONSE_GROUPS = ",".join( + ["RelatedLinks", "Categories", "Rank", "ContactInfo", "RankByCountry", + "UsageStats", "Speed", "Language", "OwnedDomains", "LinksInCount", + "SiteData", "AdultContent"]) + class CallAwis(object):
Add constant with UrlInfo response groups
py
diff --git a/drf_auto_endpoint/adapters.py b/drf_auto_endpoint/adapters.py index <HASH>..<HASH> 100644 --- a/drf_auto_endpoint/adapters.py +++ b/drf_auto_endpoint/adapters.py @@ -94,9 +94,9 @@ class EmberAdapter(BaseAdapter): config['fields'] = adapted for i, fs in enumerate(config['fieldsets']): for j, f in enumerate(fs['fields']): - new_field = { - 'name': f['key'] if 'key' in f else f['name'] - } + new_field = f + if 'key' in 'f': + new_field['name'] = new_field.pop('key') fs['fields'][j] = new_field config['fieldsets'][i] = fs
:bug: fix fieldsets for EmberAdpater
py
diff --git a/imgur-python/main.py b/imgur-python/main.py index <HASH>..<HASH> 100644 --- a/imgur-python/main.py +++ b/imgur-python/main.py @@ -219,11 +219,10 @@ def handle_unauthorized_commands(factory, action): req = factory.build_request(('album', id)) if action == 'get-comment': - (item_hash, cid) = sys.argv[2:4] - req = factory.build_request(('gallery', item_hash, 'comments', cid)) + cid = sys.argv[2] + req = factory.build_request(('comment', cid)) if action == 'get-gallery': - imgur = factory.build_api() id = sys.argv[2] req = factory.build_request(('gallery', id))
Fixing get-comment and removing extra variable
py
diff --git a/phy/gui/dock.py b/phy/gui/dock.py index <HASH>..<HASH> 100644 --- a/phy/gui/dock.py +++ b/phy/gui/dock.py @@ -130,12 +130,15 @@ class DockWindow(QtGui.QMainWindow): action.setShortcut(key) self.addAction(action) self._actions[name] = action + if callback: + setattr(self, name, callback) return action def remove_action(self, name): """Remove an action.""" self.removeAction(self._actions[name]) del self._actions[name] + delattr(self, name) def remove_actions(self): names = sorted(self._actions.keys()) @@ -146,7 +149,6 @@ class DockWindow(QtGui.QMainWindow): """Decorator to add a global keyboard shortcut.""" def wrap(func): self.add_action(name, shortcut=key, callback=func) - setattr(self, name, func) return wrap # Views
WIP: add_action() now sets the callback as attribute.
py
diff --git a/tests/test__implicitization.py b/tests/test__implicitization.py index <HASH>..<HASH> 100644 --- a/tests/test__implicitization.py +++ b/tests/test__implicitization.py @@ -18,7 +18,7 @@ from tests import utils FLOAT64 = np.float64 # pylint: disable=no-member -LOCAL_EPS = 0.5**26 # sqrt(machine precision) +LOCAL_EPS = 0.5**25 # 2 * sqrt(machine precision) class Test__evaluate3(unittest.TestCase):
Increasing error threshold for implicitization unit tests. Succeeds on one machine but not on another: <URL>
py
diff --git a/km3pipe/io/aanet.py b/km3pipe/io/aanet.py index <HASH>..<HASH> 100644 --- a/km3pipe/io/aanet.py +++ b/km3pipe/io/aanet.py @@ -342,7 +342,9 @@ def get_aanet_header(event_file): fields, values = [s.split() for s in line.split(':')] for field in fields: for value in values: - d[field+'-'+value] = event_file.header.get_field(field, value) + if field == "physics" and value == "date": # segfaults + continue + d[field+'-'+value] = header.get_field(field, value) return d
Omit physics-date, since it segfaults
py
diff --git a/tests/providers/test_easydns.py b/tests/providers/test_easydns.py index <HASH>..<HASH> 100644 --- a/tests/providers/test_easydns.py +++ b/tests/providers/test_easydns.py @@ -10,7 +10,7 @@ import pytest class EasyDnsProviderTests(TestCase, IntegrationTests): Provider = Provider - provider_name = 'nsone' + provider_name = 'easydns' domain = 'capsulecd.com' provider_opts = {'api_endpoint': 'http://sandbox.rest.easydns.net'} def _filter_headers(self):
fix easydns provider name.
py
diff --git a/pytest_cov.py b/pytest_cov.py index <HASH>..<HASH> 100644 --- a/pytest_cov.py +++ b/pytest_cov.py @@ -79,6 +79,13 @@ class CovPlugin(object): # slave is started in pytest hook def start(self, controller_cls, config=None, nodeid=None): + if config is None: + # fake config option for cov_core + class Config(object): + option = self.options + + config = Config() + self.cov_controller = controller_cls( self.options.cov_source, self.options.cov_report or ['term'],
Fixed handling of coveragerc in DistMaster.
py
diff --git a/mutagen/id3.py b/mutagen/id3.py index <HASH>..<HASH> 100644 --- a/mutagen/id3.py +++ b/mutagen/id3.py @@ -1289,7 +1289,7 @@ class PRIV(Frame): def _pprint(self): isascii = ord(max(self.data)) < 128 if isascii: return "%s=%s" % (self.owner, self.data) - else: return "%s (%d bytes)" % len(self.data) + else: return "%s (%d bytes)" % (self.owner, len(self.data)) class SIGN(Frame): "Signature frame"
PRIV#_pprint: Handle non-ASCII case more sanely.
py
diff --git a/spacy/compat.py b/spacy/compat.py index <HASH>..<HASH> 100644 --- a/spacy/compat.py +++ b/spacy/compat.py @@ -53,9 +53,9 @@ is_osx = sys.platform == 'darwin' if is_python2: import imp bytes_ = str - unicode_ = unicode - basestring_ = basestring - input_ = raw_input + unicode_ = unicode # noqa: F821 + basestring_ = basestring # noqa: F821 + input_ = raw_input # noqa: F821 json_dumps = lambda data: ujson.dumps(data, indent=2, escape_forward_slashes=False).decode('utf8') path2str = lambda path: str(path).decode('utf8')
Add noqa to Python 2 compat variables of built-ins (see #<I>)
py
diff --git a/pymatbridge/version.py b/pymatbridge/version.py index <HASH>..<HASH> 100644 --- a/pymatbridge/version.py +++ b/pymatbridge/version.py @@ -2,10 +2,10 @@ # Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z" _version_major = 0 -_version_minor = 5 -_version_micro = '' # use '' for first of series, number for 1 and above -_version_extra = 'dev' -#_version_extra = '' # Uncomment this for full releases +_version_minor = 4 +_version_micro = 1#'' # use '' for first of series, number for 1 and above + #_version_extra = 'dev' +_version_extra = '' # Uncomment this for full releases # Construct full version string from these. _ver = [_version_major, _version_minor]
Fumbled that release. Oops. Here's another one: <I>.
py
diff --git a/spacy/tests/test_download.py b/spacy/tests/test_download.py index <HASH>..<HASH> 100644 --- a/spacy/tests/test_download.py +++ b/spacy/tests/test_download.py @@ -5,6 +5,7 @@ from ..download import download, get_compatibility, get_version, check_error_dep import pytest [email protected] def test_download_fetch_compatibility(): compatibility = get_compatibility() assert type(compatibility) == dict
Mark compatibility table test as slow (temporary) Prevent Travis from running this test until models repo is published
py
diff --git a/python/ray/monitor.py b/python/ray/monitor.py index <HASH>..<HASH> 100644 --- a/python/ray/monitor.py +++ b/python/ray/monitor.py @@ -461,7 +461,7 @@ class Monitor(object): result = pipe.hget(local_scheduler_id, "gpus_in_use") gpus_in_use = (dict() if result is None else - json.loads(result)) + json.loads(result.decode("ascii"))) driver_id_hex = binary_to_hex(driver_id) if driver_id_hex in gpus_in_use:
Pass string into json.loads, not bytes object. (#<I>)
py
diff --git a/ethereum/blocks.py b/ethereum/blocks.py index <HASH>..<HASH> 100644 --- a/ethereum/blocks.py +++ b/ethereum/blocks.py @@ -426,6 +426,10 @@ class Block(rlp.Serializable): raise ValueError("Block's gaslimit is inconsistent with its parent's gaslimit") if self.difficulty != calc_difficulty(parent, self.timestamp): raise ValueError("Block's difficulty is inconsistent with its parent's difficulty") + if self.gas_used > self.gas_limit: + raise ValueError("Gas used exceeds gas limit") + if self.timestamp <= parent.header.timestamp: + raise ValueError("Timestamp equal to or before parent") for uncle in uncles: assert isinstance(uncle, BlockHeader)
Added two more header consistency checks to match YP
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -50,7 +50,7 @@ setup(name = "isochrones", ], install_requires=['pandas>=0.14','astropy>=0.3','emcee>=2.0', 'numpy>=1.9', 'tables>=3.0', - 'asciitree', 'corner', 'astroquery', 'numba', + 'asciitree', 'corner', 'astroquery', 'configobj'], zip_safe=False )
removed numba requirement because it breaks shit to do it this way
py
diff --git a/utils/inertia.py b/utils/inertia.py index <HASH>..<HASH> 100644 --- a/utils/inertia.py +++ b/utils/inertia.py @@ -294,16 +294,16 @@ def principals(geom, masses, on_tol=_DEF.Orthonorm_Tol): top = ETT.Linear ## end if else: - if abs(moments[0] - moments[1]) < PRM.Equal_Moment_Tol: + if abs((moments[1] / moments[0]) - 1.0) < PRM.Equal_Moment_Tol: # Spherical or oblate symmetrical - if abs(moments[1] - moments[2]) < PRM.Equal_Moment_Tol: + if abs((moments[2] / moments[1]) - 1.0) < PRM.Equal_Moment_Tol: top = ETT.Spherical else: top = ETT.SymmOblate ## end if else: # Prolate symmetrical or Asymmetric - if abs(moments[1] - moments[2]) < PRM.Equal_Moment_Tol: + if abs((moments[2] / moments[1]) - 1.0) < PRM.Equal_Moment_Tol: top = ETT.SymmProlate else: top = ETT.Asymmetrical
utils.inertia: Corrected top checking to relative Accidentally implemented top type checking as a tolerance on the absolute values. This commit fixes it, by changing it to a tolerance on the relative deviation of the two moments being compared.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ import glob import sys from setuptools import setup, Extension +from distutils.command.build_ext import build_ext sources = ['pycrfsuite/_pycrfsuite.cpp', 'pycrfsuite/trainer_wrapper.cpp'] @@ -25,10 +26,17 @@ includes = [ if sys.platform == 'win32': includes.extend(['crfsuite/win32', 'include']) +class build_ext_check_gcc(build_ext): + def build_extensions(self): + c = self.compiler + if c.compiler_type == 'unix' and 'gcc' in c.compiler: + for e in self.extensions: + e.extra_compile_args=['-std=c99'] + build_ext.build_extensions(self) + ext_modules = [Extension('pycrfsuite._pycrfsuite', include_dirs=includes, language='c++', - extra_compile_args=['-std=c99'], sources=sources )] @@ -62,5 +70,6 @@ setup( ], zip_safe=False, packages=['pycrfsuite'], - ext_modules=ext_modules + ext_modules=ext_modules, + cmdclass={ 'build_ext': build_ext_check_gcc} )
customize build_ext to check compiler
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ setup(name='inflect', "Programming Language :: Python", "Development Status :: 3 - Alpha", "Intended Audience :: Developers", - "License :: OSI Approved :: GNU General Public License (GPL)", + "License :: OSI Approved :: GNU Affero General Public License v3" "Natural Language :: English", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules",
had wrong licence in setup.py. fixed: now affero
py
diff --git a/tunic/core.py b/tunic/core.py index <HASH>..<HASH> 100644 --- a/tunic/core.py +++ b/tunic/core.py @@ -30,8 +30,8 @@ try: except ImportError as e: if os.getenv('READTHEDOCS', None) != 'True': raise - run = lambda: None - sudo = lambda: None + run = None + sudo = None PERMS_FILE_DEFAULT = 'u+rw,g+rw,o+r' PERMS_DIR_DEFAULT = 'u+rwx,g+rws,o+rx'
Don't bother trying to make fake functions when fabric is not importable on RTD
py
diff --git a/tools/interop_matrix/client_matrix.py b/tools/interop_matrix/client_matrix.py index <HASH>..<HASH> 100644 --- a/tools/interop_matrix/client_matrix.py +++ b/tools/interop_matrix/client_matrix.py @@ -159,6 +159,10 @@ LANG_RELEASE_MATRIX = { ('v1.27.1', ReleaseInfo(runtimes=['go1.11'])), ('v1.28.0', ReleaseInfo(runtimes=['go1.11'])), ('v1.29.0', ReleaseInfo(runtimes=['go1.11'])), + ('v1.30.0', ReleaseInfo(runtimes=['go1.11'])), + ('v1.31.1', ReleaseInfo(runtimes=['go1.11'])), + ('v1.32.0', ReleaseInfo(runtimes=['go1.11'])), + ('v1.33.1', ReleaseInfo(runtimes=['go1.11'])), ]), 'java': OrderedDict([
Add <I> release of grpc-go to interop matrix Also add the missing old releases
py
diff --git a/src/python/turicreate/data_structures/sgraph.py b/src/python/turicreate/data_structures/sgraph.py index <HASH>..<HASH> 100644 --- a/src/python/turicreate/data_structures/sgraph.py +++ b/src/python/turicreate/data_structures/sgraph.py @@ -284,7 +284,7 @@ class SGraph(object): See Also -------- - edges + Edge Examples -------- @@ -337,7 +337,8 @@ class SGraph(object): See Also -------- - vertices + Vertex + Examples -------- @@ -370,7 +371,7 @@ class SGraph(object): See Also -------- - vertices, edges + Vertex, Edge Examples -------- @@ -415,7 +416,7 @@ class SGraph(object): See Also -------- - vertices, get_edges + Vertex, get_edges Examples -------- @@ -514,7 +515,7 @@ class SGraph(object): See Also -------- - edges, get_vertices + Edge, get_vertices Examples -------- @@ -615,7 +616,7 @@ class SGraph(object): See Also -------- - vertices, SFrame, add_edges + Vertex, SFrame, add_edges Notes ----- @@ -685,7 +686,7 @@ class SGraph(object): See Also -------- - edges, SFrame, add_vertices + Edge, SFrame, add_vertices Notes -----
Fix "See Also" links for SGraph (#<I>)
py
diff --git a/svgpathtools/document.py b/svgpathtools/document.py index <HASH>..<HASH> 100644 --- a/svgpathtools/document.py +++ b/svgpathtools/document.py @@ -41,6 +41,7 @@ import xml.etree.ElementTree as etree from xml.etree.ElementTree import Element, SubElement, register_namespace from xml.dom.minidom import parseString import warnings +from io import StringIO from tempfile import gettempdir from time import time @@ -257,6 +258,18 @@ class Document: self.root = self.tree.getroot() + @staticmethod + def from_svg_string(svg_string): + """Factory method for creating a document from a string holding a svg + object + """ + # wrap string into StringIO object + svg_file_obj = StringIO(svg_string) + # reset cursor to the beginning of the buffer + svg_file_obj.seek(0) + # create document from file object + return Document(svg_file_obj) + def paths(self, group_filter=lambda x: True, path_filter=lambda x: True, path_conversions=CONVERSIONS): """Returns a list of all paths in the document.
Add factory method for creating from string holding svg object
py
diff --git a/chatterbot/__init__.py b/chatterbot/__init__.py index <HASH>..<HASH> 100644 --- a/chatterbot/__init__.py +++ b/chatterbot/__init__.py @@ -3,7 +3,7 @@ ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot -__version__ = '0.8.6' +__version__ = '0.8.7' __author__ = 'Gunther Cox' __email__ = '[email protected]' __url__ = 'https://github.com/gunthercox/ChatterBot'
Update package version to <I>
py
diff --git a/param/version.py b/param/version.py index <HASH>..<HASH> 100644 --- a/param/version.py +++ b/param/version.py @@ -128,9 +128,11 @@ class Version(object): def __init__(self, release=None, fpath=None, commit=None, reponame=None, commit_count=0): """ - :release: Release tuple (corresponding to the current VCS tag) - :fpath: Set to ``__file__`` to access version control information - :reponame: Used to verify VCS repository name. + :release: Release tuple (corresponding to the current VCS tag) + :commit Short SHA. Set to '$Format:%h$' for git archive support. + :fpath: Set to ``__file__`` to access version control information + :reponame: Used to verify VCS repository name. + :commit_count Commits since last release. Set for dev releases. """ self.fpath = fpath self._expected_commit = commit
Updated docstring of param.Version constructor
py
diff --git a/salt/runners/manage.py b/salt/runners/manage.py index <HASH>..<HASH> 100644 --- a/salt/runners/manage.py +++ b/salt/runners/manage.py @@ -41,6 +41,11 @@ def _ping(tgt, tgt_type, timeout, gather_job_timeout): if not pub_data: return pub_data + log.debug( + 'manage runner will ping the following minion(s): %s', + ', '.join(sorted(pub_data['minions'])) + ) + returned = set() for fn_ret in client.get_cli_event_returns( pub_data['jid'], @@ -52,11 +57,13 @@ def _ping(tgt, tgt_type, timeout, gather_job_timeout): if fn_ret: for mid, _ in six.iteritems(fn_ret): + log.debug('minion \'%s\' returned from ping', mid) returned.add(mid) - not_returned = set(pub_data['minions']) - returned + not_returned = sorted(set(pub_data['minions']) - returned) + returned = sorted(returned) - return list(returned), list(not_returned) + return returned, not_returned def status(output=True, tgt='*', tgt_type='glob', expr_form=None, timeout=None, gather_job_timeout=None):
Add debug logging for manage.up/down This adds some debug logging which we can hopefully use to troubleshoot <URL>
py
diff --git a/threadedcomments/models.py b/threadedcomments/models.py index <HASH>..<HASH> 100644 --- a/threadedcomments/models.py +++ b/threadedcomments/models.py @@ -1,5 +1,6 @@ from django.db import models from django.contrib.comments.models import Comment +from django.contrib.comments.managers import CommentManager from django.conf import settings from django.utils.translation import ugettext_lazy as _ @@ -15,6 +16,8 @@ class ThreadedComment(Comment): verbose_name=_('Last child')) tree_path = models.CharField(_('Tree path'), max_length=MAX_PATH_LENGTH, editable=False, db_index=True) + + objects = CommentManager() def _get_depth(self): return len(self.tree_path.split(PATH_SEPARATOR))
Have ThreadedComment use CommentManager
py
diff --git a/openquake/job/params.py b/openquake/job/params.py index <HASH>..<HASH> 100644 --- a/openquake/job/params.py +++ b/openquake/job/params.py @@ -39,6 +39,7 @@ CALCULATION_MODE = { 'Deterministic': 'deterministic', 'Event Based': 'event_based', 'Disaggregation': 'disaggregation', + 'UHS': 'uhs', } INPUT_FILE_TYPES = { @@ -201,6 +202,9 @@ define_param('EPSILON_BIN_LIMITS', 'epsilon_bin_limits', define_param('DISTANCE_BIN_LIMITS', 'distance_bin_limits', modes='disaggregation', to_job=cttfl) +# Uniform Hazard Spectra parameters: +define_param('UHS_PERIODS', 'uhs_periods', modes='uhs', to_job=cttfl) + # area sources define_param('INCLUDE_AREA_SOURCES', 'include_area_sources', modes=('classical', 'event_based', 'disaggregation'),
added uhs_periods to param definition Former-commit-id: 6cd<I>e7f9aa<I>ecef<I>ad1ff<I>b
py
diff --git a/anycast_healthchecker/utils.py b/anycast_healthchecker/utils.py index <HASH>..<HASH> 100644 --- a/anycast_healthchecker/utils.py +++ b/anycast_healthchecker/utils.py @@ -925,7 +925,7 @@ def setup_logger(config): custom_format = log_format() json_formatter = CustomJsonFormatter(custom_format, - prefix=PROGRAM_NAME) + prefix=PROGRAM_NAME + ': ') formatter = logging.Formatter( '%(asctime)s {program}[%(process)d] %(levelname)-8s ' '%(threadName)-{width}s %(message)s'
Make the JSON formatted logs more compatible with syslog
py
diff --git a/examples/guestbook.py b/examples/guestbook.py index <HASH>..<HASH> 100644 --- a/examples/guestbook.py +++ b/examples/guestbook.py @@ -42,17 +42,17 @@ LAYOUT = """ <title>Guestbook</title> <body> <form method=post action=/add> - Name: <input type=text name=name><br> - Message: <textarea name=message></textarea><br> - <input type=submit value=Post><br> - </form> - <ul> - {% for entry in entries %} - <li><h2>{{ entry.name }}</h2> - {{ entry.message | safe }} + Name: <input type=text name=name><br> + Message: <textarea name=message></textarea><br> + <input type=submit value=Post><br> + </form> + <ul> + {% for entry in entries %} + <li><h2>{{ entry.name }}</h2> + {{ entry.message | safe }} </li> {% endfor %} - </ul> + </ul> </body> </html> """
Fix indentation in Flask example.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from distutils.core import setup -setup(name='pytds', +setup(name='python-tds', version='0.1', description='Python DBAPI driver for MSSQL using pure Python TDS (Tabular Data Stream) protocol implementation', author='Mikhail Denisenko',
changed name of package to be able to upload to pypi
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ data_files=[ ('cherrypy/scaffold', ['cherrypy/scaffold/example.conf', 'cherrypy/scaffold/site.conf', ]), - ('cherrypy/scaffold/static', ['made_with_cherrypy_small.png', + ('cherrypy/scaffold/static', ['cherrypy/scaffold/static/made_with_cherrypy_small.png', ]), ('cherrypy/test', ['cherrypy/test/style.css', 'cherrypy/test/test.pem',
Oops. Buglet in setup.py.
py
diff --git a/c7n/policy.py b/c7n/policy.py index <HASH>..<HASH> 100644 --- a/c7n/policy.py +++ b/c7n/policy.py @@ -425,6 +425,13 @@ class LambdaMode(PolicyExecutionMode): TODO: better customization around execution context outputs TODO: support centralized lambda exec across accounts. """ + + mode = self.policy.data.get('mode', {}) + if not bool(mode.get("log", True)): + root = logging.getLogger() + map(root.removeHandler, root.handlers[:]) + root.handlers = [logging.NullHandler()] + resources = self.resolve_resources(event) if not resources: return resources
policy lambda - allow removing lambda logging handler (#<I>)
py
diff --git a/nailgun/entities.py b/nailgun/entities.py index <HASH>..<HASH> 100644 --- a/nailgun/entities.py +++ b/nailgun/entities.py @@ -4982,7 +4982,7 @@ class CompliancePolicies(Entity, EntityReadMixin): unique=True ), 'organization': entity_fields.OneToManyField(Organization), - 'hosts': entity_fields.ListField() + 'hosts': entity_fields.OneToManyField(Host) } self._meta = { 'api_path': 'api/v2/compliance/policies',
Changed ListField() to OneToManyField(Host)
py
diff --git a/src/python/grpcio/commands.py b/src/python/grpcio/commands.py index <HASH>..<HASH> 100644 --- a/src/python/grpcio/commands.py +++ b/src/python/grpcio/commands.py @@ -286,10 +286,11 @@ class TestLite(setuptools.Command): runner = tests.Runner() result = runner.run(loader.suite) if not result.wasSuccessful(): - sys.exit(1) + sys.exit('Test failure') def _add_eggs_to_path(self): """Adds all egg files under .eggs to sys.path""" + # TODO(jtattemusch): there has to be a cleaner way to do this import pkg_resources eggs_dir = os.path.join(PYTHON_STEM, '../../../.eggs') eggs = [os.path.join(eggs_dir, filename)
Address comments and add a TODO.
py
diff --git a/testproject/testapp/tests/common.py b/testproject/testapp/tests/common.py index <HASH>..<HASH> 100644 --- a/testproject/testapp/tests/common.py +++ b/testproject/testapp/tests/common.py @@ -6,6 +6,8 @@ try: except ImportError: import mock +__all__ = ['get_user_model', 'IntegrityError', 'mock'] + def create_user(**kwargs): data = {
Fix tests common F<I> with __all__
py