Columns: diff (string, lengths 139 to 3.65k), message (string, lengths 8 to 627), diff_languages (string, 1 class)
diff --git a/atlassian/stash.py b/atlassian/stash.py index <HASH>..<HASH> 100644 --- a/atlassian/stash.py +++ b/atlassian/stash.py @@ -95,3 +95,13 @@ class Stash(AtlassianRestAPI): ref_to=ref_to, limit=limit) return self.get(url)['values'] + + def get_content_of_file(self, project, repository, filename): + url = '/projects/{project}/repos/{repository}/browse/{filename}?raw'.format( + project=project, + repository=repository, + filename=filename + ) + return self.get(url) + +
added get_content_of_file to get raw content of file on master branch
py
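A minimal usage sketch for the endpoint added above, via the atlassian-python-api client; the host, credentials, project, and repository names are hypothetical:

from atlassian.stash import Stash

# connect to a (hypothetical) Stash / Bitbucket Server instance
stash = Stash(url='https://stash.example.com', username='user', password='secret')

# fetch the raw content of a file on the default (master) branch
readme = stash.get_content_of_file('PROJ', 'my-repo', 'README.md')
print(readme)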
diff --git a/pymunin/__init__.py b/pymunin/__init__.py index <HASH>..<HASH> 100644 --- a/pymunin/__init__.py +++ b/pymunin/__init__.py @@ -16,7 +16,7 @@ __copyright__ = "Copyright 2011, Ali Onur Uyar" __credits__ = ["Samuel Stauffer (https://github.com/samuel)", "Mark Lavin (https://github.com/mlavin)"] __license__ = "GPL" -__version__ = "0.9.27" +__version__ = "0.9.28" __maintainer__ = "Ali Onur Uyar" __email__ = "aouyar at gmail.com" __status__ = "Development" @@ -798,7 +798,7 @@ class MuninGraph: def __init__(self, title, category = None, vlabel=None, info=None, args =None, period=None, scale=None, total=None, order=None, - printf=None, witdh=None, height=None, + printf=None, width=None, height=None, autoFixNames = False): """Initialize Munin Graph.
Fix trivial error in framework. Thanks to Philippe LUC for reporting this problem.
py
diff --git a/searx/autocomplete.py b/searx/autocomplete.py index <HASH>..<HASH> 100644 --- a/searx/autocomplete.py +++ b/searx/autocomplete.py @@ -21,6 +21,16 @@ def dbpedia(query): return results +def duckduckgo(query): + # wikipedia autocompleter + url = 'https://ac.duckduckgo.com/ac/?{0}&type=list' + + resp = loads(get(url.format(urlencode(dict(q=query)))).text) + if len(resp) > 1: + return resp[1] + return [] + + def google(query): # google autocompleter autocomplete_url = 'http://suggestqueries.google.com/complete/search?client=toolbar&' # noqa @@ -48,6 +58,7 @@ def wikipedia(query): backends = {'dbpedia': dbpedia, + 'duckduckgo': duckduckgo, 'google': google, 'wikipedia': wikipedia }
[enh] duckduckgo autocomplete added
py
diff --git a/tests/python/unittest/test_operator.py b/tests/python/unittest/test_operator.py index <HASH>..<HASH> 100644 --- a/tests/python/unittest/test_operator.py +++ b/tests/python/unittest/test_operator.py @@ -5746,7 +5746,6 @@ def test_stack(): @with_seed() [email protected]("Flaky test https://github.com/apache/incubator-mxnet/issues/12329") def test_dropout(): def zero_count(array, ratio): zeros = 0
Re-enables test_dropout (#<I>)
py
diff --git a/root_numpy/tests/test_tree.py b/root_numpy/tests/test_tree.py index <HASH>..<HASH> 100644 --- a/root_numpy/tests/test_tree.py +++ b/root_numpy/tests/test_tree.py @@ -39,11 +39,17 @@ def test_list_trees(): assert_equal(trees, ['ntuple']) # Multiple key cycles of the same tree with temp() as rfile: - tree = ROOT.TTree("tree", "tree") + tree = ROOT.TTree('tree', 'tree') rfile.Write() assert_equal(len(rnp.list_trees(rfile.GetName())), 1) rfile.Write() assert_equal(len(rnp.list_trees(rfile.GetName())), 1) + rdir = rfile.mkdir('dir') + rdir.cd() + tree = ROOT.TTree('tree', 'tree') + rfile.Write() + assert_equal(set(rnp.list_trees(rfile.GetName())), + set(['tree', 'dir/tree'])) def test_list_branches():
list_trees: test with subdirectory
py
diff --git a/services/datalad/tests/test_history.py b/services/datalad/tests/test_history.py index <HASH>..<HASH> 100644 --- a/services/datalad/tests/test_history.py +++ b/services/datalad/tests/test_history.py @@ -1,5 +1,6 @@ import falcon import json +import pygit2 from .dataset_fixtures import * from datalad_service.handlers.history import HistoryResource @@ -12,4 +13,19 @@ def test_history(client): history = json.loads( response.content) if response.content else None assert history is not None - assert len(history["log"]) == 4 \ No newline at end of file + assert len(history["log"]) == 4 + for entry in history["log"]: + assert isinstance(entry["authorEmail"], str) + assert '@' in entry["authorEmail"] + assert isinstance(entry["authorName"], str) + assert isinstance(entry["date"], int) + assert isinstance(entry["message"], str) + assert isinstance(entry["id"], str) + assert len(entry["id"]) == 40 + assert isinstance(entry["references"], str) + # If there is any references content, check the format + if (len(entry["references"]) > 0): + for ref in entry["references"].split(','): + # Full references will always have at least "refs" prefixed + assert len(ref) > 4 + pygit2.reference_is_valid_name(ref)
test: Add coverage for history API format
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -115,12 +115,11 @@ setup( 'rez': \ ['rezconfig'] + \ ['README*'] + \ - find_files('plugins', '*.yapsy-plugin') + \ find_files('_sys', '*.csh') + \ find_files('_sys', '*.sh') + \ - #find_files('plugins/build_system/cmake_files', '*.cmake') + \ find_files('tests/data', '*.*'), 'rezplugins': [ + 'build_system/template_files/Doxyfile', 'build_system/cmake_files/*.cmake', ] },
+ Fix up some install time errors and install template Doxyfile in correct location.
py
diff --git a/torequests/utils.py b/torequests/utils.py index <HASH>..<HASH> 100644 --- a/torequests/utils.py +++ b/torequests/utils.py @@ -275,7 +275,10 @@ class Regex(object): self.allow_duplicated = allow_duplicated def register(self, pattern, obj, **kwargs): - self.container.append((re.compile(pattern, **kwargs), obj)) + if not isinstance(pattern, (list, tuple)): + pattern = [pattern] + for one_pattern in pattern: + self.container.append((re.compile(one_pattern, **kwargs), obj)) def register_function(self, pattern, **kwargs): def wrapper(function):
Regex class support multi-pattern binding
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -7,7 +7,7 @@ __version__ = '0.98pre' setup(name='visidata', version=__version__, - install_requires='python-dateutil openpyxl xlrd h5py psycopg2'.split(), + install_requires='python-dateutil openpyxl xlrd h5py psycopg2 pyshp'.split(), description='curses interface for exploring and arranging tabular data', long_description=open('README.md').read(), author='Saul Pwanson',
Add pyshp to setup.py requirements
py
diff --git a/sos/plugins/powerpc.py b/sos/plugins/powerpc.py index <HASH>..<HASH> 100644 --- a/sos/plugins/powerpc.py +++ b/sos/plugins/powerpc.py @@ -80,8 +80,9 @@ class PowerPC(Plugin, RedHatPlugin, UbuntuPlugin, DebianPlugin): if isPowerNV: self.add_copy_specs([ - "/proc/ppc64/", - "/sys/kernel/debug/powerpc/", + "/proc/ppc64/eeh", + "/proc/ppc64/systemcfg" + "/proc/ppc64/topology_updates" "/sys/firmware/opal/msglog", "/var/log/opal-elog/" ])
[powerpc] collect selected files instead of entire directory. We were collecting the powerpc dir from debugfs, which was causing some issues, and we don't need that dir's data for debugging purposes, so that spec has been removed. Also collect selected files from the /proc/ppc<I> directory instead of the entire directory.
py
diff --git a/django_extensions/management/commands/sync_media_s3.py b/django_extensions/management/commands/sync_media_s3.py index <HASH>..<HASH> 100644 --- a/django_extensions/management/commands/sync_media_s3.py +++ b/django_extensions/management/commands/sync_media_s3.py @@ -177,9 +177,8 @@ class Command(BaseCommand): return # Skip directories we don't want to sync # Later we assume the MEDIA_ROOT ends with a trailing slash - # TODO: Check if we should check os.path.sep for Windows - if not root_dir.endswith('/'): - root_dir = root_dir + '/' + if not root_dir.endswith(os.path.sep): + root_dir = root_dir + os.path.sep for file in names: headers = {}
Changed path to use os.path.sep to support Windows.
py
diff --git a/livebridge/storages/base.py b/livebridge/storages/base.py index <HASH>..<HASH> 100644 --- a/livebridge/storages/base.py +++ b/livebridge/storages/base.py @@ -81,5 +81,7 @@ class BaseStorage(object): raise NotImplementedError() def get_control(self): - """Method for retrieving of control data form storage.""" + """Method for retrieving of control data form storage. + + :returns: - dictionary""" raise NotImplementedError()
Added missing docstring to BaseStorage.
py
diff --git a/tests/test_std.py b/tests/test_std.py index <HASH>..<HASH> 100644 --- a/tests/test_std.py +++ b/tests/test_std.py @@ -288,6 +288,17 @@ class TestConsul(object): time.sleep(40/1000.0) + def test_agent_register_enable_tag_override(self, consul_port): + c = consul.Consul(port=consul_port) + index, nodes = c.health.service("foo1") + assert nodes == [] + + c.agent.service.register('foo', enable_tag_override=True) + + assert c.agent.services()['foo']['EnableTagOverride'] + # Cleanup tasks + c.agent.check.deregister('foo') + def test_agent_service_maintenance(self, consul_port): c = consul.Consul(port=consul_port)
Added EnableTagOverride pytest
py
diff --git a/openquake/kvs/__init__.py b/openquake/kvs/__init__.py index <HASH>..<HASH> 100644 --- a/openquake/kvs/__init__.py +++ b/openquake/kvs/__init__.py @@ -49,6 +49,16 @@ def get_keys(regexp): return get_client().keys(regexp) +def mget(keys): + return get_client().mget(keys) + + +def mget_decoded(keys): + decoder = json.JSONDecoder() + + return [decoder.decode(value) for value in get_client().mget(keys)] + + def get_pattern(regexp): """Get all the values whose keys satisfy the given regexp.
Added mget() and mget_decoded() helper functions. Former-commit-id: <I>e8acb6e<I>dfe4d<I>ce4b5e<I>f0fa9e<I>e
py
diff --git a/zappa/middleware.py b/zappa/middleware.py index <HASH>..<HASH> 100644 --- a/zappa/middleware.py +++ b/zappa/middleware.py @@ -175,7 +175,10 @@ class ZappaWSGIMiddleware(object): for kvp in kvps: kvp = kvp.strip() if 'expires' in kvp.lower(): - exp = time.strptime(kvp.split('=')[1], "%a, %d-%b-%Y %H:%M:%S GMT") + try: + exp = time.strptime(kvp.split('=')[1], "%a, %d-%b-%Y %H:%M:%S GMT") + except ValueError: # https://tools.ietf.org/html/rfc6265#section-5.1.1 + exp = time.strptime(kvp.split('=')[1], "%a, %d-%b-%y %H:%M:%S GMT") yield name, exp break
Support both 2 and 4 digit year formats in cookies
py
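The try/except above covers both the 4-digit and 2-digit year forms that RFC 6265 section 5.1.1 allows in cookie Expires values; a standalone sketch of the same idea using only the stdlib:

import time

def parse_cookie_expires(value):
    # try the 4-digit year form first, then fall back to the
    # 2-digit form permitted by RFC 6265 section 5.1.1
    for fmt in ("%a, %d-%b-%Y %H:%M:%S GMT", "%a, %d-%b-%y %H:%M:%S GMT"):
        try:
            return time.strptime(value, fmt)
        except ValueError:
            continue
    raise ValueError("unrecognized Expires format: %r" % value)

parse_cookie_expires("Wed, 21-Oct-2015 07:28:00 GMT")  # 4-digit year
parse_cookie_expires("Wed, 21-Oct-15 07:28:00 GMT")    # 2-digit year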
diff --git a/otree_redwood/mixins.py b/otree_redwood/mixins.py index <HASH>..<HASH> 100644 --- a/otree_redwood/mixins.py +++ b/otree_redwood/mixins.py @@ -30,10 +30,14 @@ class SubsessionSilosMixin(models.Model): class Meta: abstract = True - def group_randomly_in_silos(self, groups_per_silo, fixed_id_in_group=False): + def group_randomly_in_silos(self, num_silos, fixed_id_in_group=False): groups = self.get_group_matrix() - num_silos = math.ceil(len(groups) / groups_per_silo) + if num_silos > len(groups): + raise ValueError('number of silos cannot be greater than number of groups') + + groups_per_silo = math.ceil(len(groups) / num_silos) + # num_silos = math.ceil(len(groups) / groups_per_silo) silos = [groups[x:x+groups_per_silo] for x in range(0, num_silos * groups_per_silo, groups_per_silo)] randomized_groups = [] for silo in silos:
change mixins.group_randomly_in_silos to take 'number of silos' instead of 'groups per silo'
py
diff --git a/atrcopy.py b/atrcopy.py index <HASH>..<HASH> 100755 --- a/atrcopy.py +++ b/atrcopy.py @@ -322,8 +322,18 @@ class RawSectorsSegment(DefaultSegment): class IndexedByteSegment(DefaultSegment): def __init__(self, byte_order, bytes, **kwargs): - data = bytes[byte_order] - DefaultSegment.__init__(self, 0, data, **kwargs) + self.order = byte_order + DefaultSegment.__init__(self, 0, bytes, **kwargs) + + def __getitem__(self, index): + return self.data[self.order[index]] + + def __setitem__(self, index, value): + self.data[self.order[index]] = value + self._search_copy = None + + def tostring(self): + return self.data[self.order[:]].tostring() class AtariDosFile(object):
Workaround for numpy treating indexed arrays as copies: operate on original array using indexes
py
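The workaround above is needed because NumPy fancy indexing returns a copy rather than a view; a quick demonstration of the distinction:

import numpy as np

data = np.arange(5)
order = np.array([4, 3, 2, 1, 0])

copy = data[order]      # fancy indexing produces a new array
copy[0] = 99            # mutates the copy only
print(data)             # [0 1 2 3 4] -- original unchanged

data[order[0]] = 99     # index the original through the order array
print(data)             # [ 0  1  2  3 99] -- original mutated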
diff --git a/tests/iarm/test_parsing.py b/tests/iarm/test_parsing.py index <HASH>..<HASH> 100644 --- a/tests/iarm/test_parsing.py +++ b/tests/iarm/test_parsing.py @@ -50,5 +50,9 @@ class TestArmValidation(TestArm): with self.assertRaises(iarm.exceptions.ValidationError): self.interp.evaluate(' BADINST') + def test_bad_formatting(self): + with self.assertRaises(iarm.exceptions.ValidationError): + self.interp.evaluate('B .') # `B .` used to pass because `.` is not letter, had to fix regex + if __name__ == '__main__': unittest.main()
Added regression test for `B .`
py
diff --git a/safe_qgis/tools/wizard_dialog.py b/safe_qgis/tools/wizard_dialog.py index <HASH>..<HASH> 100644 --- a/safe_qgis/tools/wizard_dialog.py +++ b/safe_qgis/tools/wizard_dialog.py @@ -326,7 +326,7 @@ class WizardDialog(QtGui.QDialog, Ui_WizardDialogBase): self.global_default_data = metadata.global_default_attribute['id'] self.do_not_use_string = metadata.do_not_use_attribute['name'] self.do_not_use_data = metadata.do_not_use_attribute['id'] - self.defaults = breakdown_defaults() + self.defaults = get_defaults() # Initialize attributes self.existing_keywords = None @@ -1030,7 +1030,7 @@ class WizardDialog(QtGui.QDialog, Ui_WizardDialogBase): def set_widgets_step_kw_aggregation(self): """Set widgets on the aggregation tab.""" # Set values based on existing keywords (if already assigned) - self.defaults = breakdown_defaults() + self.defaults = get_defaults() female_ratio_default = self.get_existing_keyword( female_ratio_default_key)
[IFCW] Update the wizard to the recent API changes
py
diff --git a/crosscat/utils/geweke_utils.py b/crosscat/utils/geweke_utils.py index <HASH>..<HASH> 100644 --- a/crosscat/utils/geweke_utils.py +++ b/crosscat/utils/geweke_utils.py @@ -316,6 +316,8 @@ def plot_diagnostic_data(forward_diagnostics_data, diagnostics_data_list, map(pylab.plot, kl_series_list) pylab.xlabel('iteration') pylab.ylabel('KL') + # FIXME: remove, or do something "better" + pylab.gca().set_ylim((0., 0.1)) if parameters is not None: pu.show_parameters(parameters) pass
temporarily fix y-limits of kl plot
py
diff --git a/aiogram/types/chat.py b/aiogram/types/chat.py index <HASH>..<HASH> 100644 --- a/aiogram/types/chat.py +++ b/aiogram/types/chat.py @@ -76,7 +76,25 @@ class Chat(base.TelegramObject): if self.type == ChatType.PRIVATE: return f"tg://user?id={self.id}" - return f'https://t.me/{self.username}' if self.username else await self.export_invite_link() + if self.username: + return f'https://t.me/{self.username}' + + if self.invite_link: + return self.invite_link + + await self.update_chat() + return self.invite_link + + async def update_chat(self): + """ + User this method to update Chat data + + :return: None + """ + other = await self.bot.get_chat(self.id) + + for key, value in other: + self[key] = value async def set_photo(self, photo): """
Add Chat.update_chat method and fix get_link method
py
diff --git a/claripy/ast/base.py b/claripy/ast/base.py index <HASH>..<HASH> 100644 --- a/claripy/ast/base.py +++ b/claripy/ast/base.py @@ -985,7 +985,7 @@ def simplify(e): else: # Copy some parameters (that should really go to the Annotation backend) s._uninitialized = e.uninitialized - if s.op == 'BVS': + if s.op == 'BVS' and e.op == 'BVS': modifiable = list(s.args) modifiable[4] = e.args[4] s.args = tuple(modifiable)
Fix simplification from a non-BVS into a BVS breaking the previous fix
py
diff --git a/django_extensions/management/modelviz.py b/django_extensions/management/modelviz.py index <HASH>..<HASH> 100644 --- a/django_extensions/management/modelviz.py +++ b/django_extensions/management/modelviz.py @@ -12,6 +12,7 @@ import datetime import os import six +import django from django.db.models.fields.related import ( ForeignKey, ManyToManyField, OneToOneField, RelatedField, ) @@ -305,6 +306,8 @@ def generate_dot(app_labels, **kwargs): 'use_subgraph': use_subgraph, 'graphs': graphs, }) + if django.VERSION >= (1, 8): + c = c.flatten() dot = t.render(c) return dot
Resolve "render() must be called with a dict..." in modelviz.py
py
diff --git a/eli5/keras/explain_prediction.py b/eli5/keras/explain_prediction.py index <HASH>..<HASH> 100644 --- a/eli5/keras/explain_prediction.py +++ b/eli5/keras/explain_prediction.py @@ -25,7 +25,6 @@ and heatmap image for a target. def explain_prediction_keras(estimator, # type: Model doc, # type: np.ndarray image=None, # type: Optional['PIL.Image.Image'] - target_names=None, targets=None, # type: Optional[list] layer=None, # type: Optional[Union[int, str, Layer]] ): @@ -38,7 +37,7 @@ def explain_prediction_keras(estimator, # type: Model * The model's task is classification, i.e. final output is class scores. See :func:`eli5.explain_prediction` for more information about the ``estimator``, - ``doc``, ``target_names``, and ``targets`` parameters. + ``doc``, and ``targets`` parameters. :param keras.models.Model estimator: @@ -76,11 +75,6 @@ def explain_prediction_keras(estimator, # type: Model :type image: PIL.Image.Image, optional - :param target_names: - *Not Implemented*. - Names for classes in the final output layer. - :type target_names: list, optional - :param targets: Prediction ID's to focus on.
Remove mentions of target_names (not implemented)
py
diff --git a/setuptools/command/easy_install.py b/setuptools/command/easy_install.py index <HASH>..<HASH> 100755 --- a/setuptools/command/easy_install.py +++ b/setuptools/command/easy_install.py @@ -435,7 +435,7 @@ class easy_install(Command): self.pth_file = None PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) - if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]): + if instdir not in map(normalize_path, filter(None, PYTHONPATH)): # only PYTHONPATH dirs need a site.py, so pretend it's there self.sitepy_installed = True elif self.multi_version and not os.path.exists(pth_file):
Use filter(None) for brevity
py
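For reference, the two spellings in that hunk are equivalent: filter(None, seq) keeps only truthy items, exactly like the comprehension it replaces:

paths = ['/usr/lib', '', '/opt/lib', '']

assert list(filter(None, paths)) == ['/usr/lib', '/opt/lib']
assert [p for p in paths if p] == ['/usr/lib', '/opt/lib']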
diff --git a/sos/plugins/apport.py b/sos/plugins/apport.py index <HASH>..<HASH> 100644 --- a/sos/plugins/apport.py +++ b/sos/plugins/apport.py @@ -25,5 +25,12 @@ class Apport(Plugin, DebianPlugin, UbuntuPlugin): def setup(self): self.add_copy_spec("/etc/apport/*") + self.add_copy_spec("/var/lib/whoopsie/whoopsie-id") + self.add_cmd_output( + "gdbus call -y -d com.ubuntu.WhoopsiePreferences \ + -o /com/ubuntu/WhoopsiePreferences \ + -m com.ubuntu.WhoopsiePreferences.GetIdentifier") + self.add_cmd_output("ls -alh /var/crash/") + self.add_cmd_output("bash -c 'grep -B 50 -m 1 ProcMaps /var/crash/*'") # vim: et ts=4 sw=4
[apport] Add information on specific crashes. The whoopsie ID lets us look the machine up on errors.ubuntu.com for crash reports. Partial output from /var/crash lets us better know what crashdumps the user has without uploading all of them.
py
diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py index <HASH>..<HASH> 100644 --- a/holoviews/plotting/bokeh/element.py +++ b/holoviews/plotting/bokeh/element.py @@ -14,7 +14,8 @@ except ImportError: mpl = None import param -from ...core import Store, HoloMap, Overlay, DynamicMap, CompositeOverlay +from ...core import (Store, HoloMap, Overlay, DynamicMap, + CompositeOverlay, Element) from ...core import util from ...element import RGB from ..plot import GenericElementPlot, GenericOverlayPlot @@ -149,6 +150,8 @@ class ElementPlot(BokehPlot, GenericElementPlot): self.current_ranges = None super(ElementPlot, self).__init__(element, **params) self.handles = {} if plot is None else self.handles['plot'] + element_ids = self.hmap.traverse(lambda x: id(x), [Element]) + self.static = len(set(element_ids)) == 1 and len(self.keys) == len(self.hmap) def _init_tools(self, element): @@ -499,6 +502,8 @@ class ElementPlot(BokehPlot, GenericElementPlot): Returns a list of the plot objects to update. """ handles = [] + if self.static and not self.dynamic: + return handles for handle in self._update_handles: if handle in self.handles: handles.append(self.handles[handle])
Bokeh Elements only send data once if completely static
py
diff --git a/riak/transports/transport.py b/riak/transports/transport.py index <HASH>..<HASH> 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -35,7 +35,7 @@ class RiakTransport(object): Returns a random client identifier """ return 'py_%s' % base64.b64encode( - str(random.randint(1, 1073741824))) + str(random.randint(1, 0x40000000))) @classmethod def make_fixed_client_id(self):
Turn a seemingly random base-<I> value into an understandable hex value.
py
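The rewrite changes notation only; the two literals denote the same number, 2**30:

# 0x40000000 in hex == 1073741824 in decimal == 2 ** 30
assert 0x40000000 == 1073741824 == 2 ** 30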
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup, find_packages setup( name='pycannon', - version='0.2.0', + version='0.3.0', packages=find_packages(), install_requires=[ 'requests',
Bumped version to <I>.
py
diff --git a/sessionprofile/backends/db.py b/sessionprofile/backends/db.py index <HASH>..<HASH> 100644 --- a/sessionprofile/backends/db.py +++ b/sessionprofile/backends/db.py @@ -11,6 +11,9 @@ class SessionProfileStore(Base): """ def save_session(self, request): + if not hasattr(request, 'user'): + return + store = self.get_session_store(request) if store is not None and store.session_key is not None: sp, _ = SessionProfile.objects.get_or_create(session_key=store.session_key)
Fix for requests that have no user attribute
py
diff --git a/panoramisk/fast_agi.py b/panoramisk/fast_agi.py index <HASH>..<HASH> 100644 --- a/panoramisk/fast_agi.py +++ b/panoramisk/fast_agi.py @@ -171,8 +171,11 @@ class Application(dict): encoding=self.default_encoding) try: yield from route(request) - except Exception as e: - log.exception(e) + except BaseException: + log.exception( + 'An exception has been raised for the request "%s"', + agi_network_script + ) else: log.error('No route for the request "%s"', agi_network_script) else:
Avoid double exception logging because log.exception() already logs the stacktrace
py
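logging's exception() helper appends the active traceback on its own, which is why logging the caught object separately duplicates the stack trace; a sketch of the pattern the patch adopts (names hypothetical, simplified to synchronous code):

import logging

log = logging.getLogger(__name__)

def dispatch(route, request, agi_network_script):
    try:
        route(request)
    except BaseException:
        # log.exception() must run inside an except block; it logs
        # the message plus the current traceback automatically
        log.exception('An exception has been raised for the request "%s"',
                      agi_network_script)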
diff --git a/tmdbsimple/changes.py b/tmdbsimple/changes.py index <HASH>..<HASH> 100644 --- a/tmdbsimple/changes.py +++ b/tmdbsimple/changes.py @@ -17,7 +17,7 @@ class Changes(TMDB): """ Changes functionality. - See: http://docs.themoviedb.apiary.io/#changes + See: https://developers.themoviedb.org/3/changes """ BASE_PATH = '' URLS = {
Update changes.py: replace deprecated changes URL.
py
diff --git a/tests/test_djangoclient.py b/tests/test_djangoclient.py index <HASH>..<HASH> 100644 --- a/tests/test_djangoclient.py +++ b/tests/test_djangoclient.py @@ -65,7 +65,7 @@ class DjangoClientDriverTest(BaseBrowserTests, IsElementPresentNoJSTest, unittes browser.quit() def test_cant_switch_to_frame(self): - "zope.testbrowser should not be able to switch to frames" + "django driver should not be able to switch to frames" with self.assertRaises(NotImplementedError) as cm: self.browser.get_iframe('frame_123') self.fail() @@ -75,7 +75,7 @@ class DjangoClientDriverTest(BaseBrowserTests, IsElementPresentNoJSTest, unittes def test_simple_type(self): """ - zope.testbrowser won't support type method + django won't support type method because it doesn't interact with JavaScript """ with self.assertRaises(NotImplementedError):
tests/django: fix comments
py
diff --git a/pyang/translators/dsdl.py b/pyang/translators/dsdl.py index <HASH>..<HASH> 100644 --- a/pyang/translators/dsdl.py +++ b/pyang/translators/dsdl.py @@ -116,9 +116,6 @@ class DSDLTranslator(object): def schematron_assert(elem, cond, err_msg=None): """Install <sch:assert> under `elem`. - - The assert is wrapped in <sch:pattern> and <sch:rule> elements - and the latter also sets the context to `elem`. """ assert_ = ET.SubElement(elem, "sch:assert", test=cond) if err_msg is not None:
Updated comment that contradicted code in schematron_assert method.
py
diff --git a/samples/RecurringTask.py b/samples/RecurringTask.py index <HASH>..<HASH> 100755 --- a/samples/RecurringTask.py +++ b/samples/RecurringTask.py @@ -7,7 +7,7 @@ This application demonstrates doing something at a regular interval. import sys from bacpypes.debugging import bacpypes_debugging, ModuleLogger -from bacpypes.consolelogging import ConfigArgumentParser +from bacpypes.consolelogging import ArgumentParser from bacpypes.core import run from bacpypes.task import RecurringTask @@ -46,7 +46,7 @@ class PrairieDog(RecurringTask): try: # parse the command line arguments - parser = ConfigArgumentParser(description=__doc__) + parser = ArgumentParser(description=__doc__) # add an argument for seconds per dog parser.add_argument('seconds', metavar='N', type=int, nargs='+',
this example doesn't need a configuration file
py
diff --git a/serfclient/client.py b/serfclient/client.py index <HASH>..<HASH> 100644 --- a/serfclient/client.py +++ b/serfclient/client.py @@ -13,16 +13,26 @@ class SerfClient(object): self.connection.handshake() def event(self, name, payload, coalesce=True): + """ + Send an event to the cluster. Can take an optional payload as well, + which will be sent in the form that it's provided. + """ return self.connection.call( 'event', {'Name': name, 'Payload': payload, 'Coalesce': coalesce}) def force_leave(self, name): + """ + Force a node to leave the cluster. + """ return self.connection.call( 'force-leave', {"Node": name}) def join(self, location): + """ + Join another cluster by provided a list of ip:port locations. + """ if not isinstance(location, (list, tuple)): location = [location] return self.connection.call(
Added some documentation for the methods we have
py
diff --git a/ceph_deploy/tests/unit/util/test_arg_validators.py b/ceph_deploy/tests/unit/util/test_arg_validators.py index <HASH>..<HASH> 100644 --- a/ceph_deploy/tests/unit/util/test_arg_validators.py +++ b/ceph_deploy/tests/unit/util/test_arg_validators.py @@ -99,3 +99,30 @@ class TestHostName(object): hostname('0') message = error.value.message assert '0 must be a hostname' in message + + +class TestSubnet(object): + + def test_subnet_has_less_than_four_numbers(self): + validator = arg_validators.Subnet() + + with raises(ArgumentError) as error: + validator('3.3.3/12') + message = error.value.message + assert 'at least 4 numbers' in message + + def test_subnet_has_non_digits(self): + validator = arg_validators.Subnet() + + with raises(ArgumentError) as error: + validator('3.3.3.a/12') + message = error.value.message + assert 'have digits separated by dots' in message + + def test_subnet_missing_slash(self): + validator = arg_validators.Subnet() + + with raises(ArgumentError) as error: + validator('3.3.3.3') + message = error.value.message + assert 'must contain a slash' in message
add some tests for the dumb subnet validator
py
diff --git a/helperlibs/bio/seqio.py b/helperlibs/bio/seqio.py index <HASH>..<HASH> 100644 --- a/helperlibs/bio/seqio.py +++ b/helperlibs/bio/seqio.py @@ -27,6 +27,8 @@ except ImportError: def _get_seqtype_from_ext(handle): if isinstance(handle, basestring): name = handle + elif hasattr(handle, 'filename'): + name = handle.filename elif hasattr(handle, 'name'): name = handle.name else:
seqio: Support guessing the filetype from Flask FileStores
py
diff --git a/pyrogram/client/client.py b/pyrogram/client/client.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/client.py +++ b/pyrogram/client/client.py @@ -2104,7 +2104,7 @@ class Client: file_part += 1 if progress: - progress(file_part * part_size, file_size) + progress(min(file_part * part_size, file_size), file_size) except Exception as e: log.error(e) else: @@ -2202,7 +2202,7 @@ class Client: offset += limit if progress: - progress(offset, size) + progress(min(offset, size), size) r = session.send( functions.upload.GetFile(
Fix progress going over <I>%
py
diff --git a/scoop/launch/__init__.py b/scoop/launch/__init__.py index <HASH>..<HASH> 100644 --- a/scoop/launch/__init__.py +++ b/scoop/launch/__init__.py @@ -233,7 +233,7 @@ class Host(object): self.log.info("Zombie process(es) possibly left on " "host {0}!".format(self.hostname)) elif not self.isLocal(): - command = "kill -9 -{0} &>/dev/null".format(self.remoteProcessGID) + command = "kill -9 -{0} >&/dev/null".format(self.remoteProcessGID) subprocess.Popen(self.BASE_SSH + [self.hostname] + [command],
Made cleanup compatible with (t)csh shells
py
diff --git a/slack/rtm/client.py b/slack/rtm/client.py index <HASH>..<HASH> 100644 --- a/slack/rtm/client.py +++ b/slack/rtm/client.py @@ -10,6 +10,7 @@ import inspect import signal from typing import Optional, Callable, DefaultDict from ssl import SSLContext +from threading import current_thread, main_thread # ThirdParty Imports import asyncio @@ -185,7 +186,7 @@ class RTMClient(object): SlackApiError: Unable to retreive RTM URL from Slack. """ # TODO: Add Windows support for graceful shutdowns. - if os.name != "nt": + if os.name != "nt" and current_thread() == main_thread(): signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT) for s in signals: self._event_loop.add_signal_handler(s, self.stop)
Allow running RTM over a thread. When trying to start an RTM connection over a thread, a RuntimeError appears: "RuntimeError: set_wakeup_fd only works in main thread". This change allows it.
py
diff --git a/pyontutils/ontload.py b/pyontutils/ontload.py index <HASH>..<HASH> 100755 --- a/pyontutils/ontload.py +++ b/pyontutils/ontload.py @@ -262,6 +262,8 @@ def scigraph_build(zip_location, git_remote, org, git_local, branch, commit, if not os.path.exists(local): repo = Repo.clone_from(remote + '.git', local) + elif not Path(local, '.git').exists(): + repo = Repo.clone_from(remote + '.git', local) else: repo = Repo(local)
ontload handle case where SciGraph folder exists but is empty
py
diff --git a/tests/settings.py b/tests/settings.py index <HASH>..<HASH> 100644 --- a/tests/settings.py +++ b/tests/settings.py @@ -15,6 +15,7 @@ INSTALLED_APPS = [ 'django.contrib.sessions', 'sundial', 'tests.test_fields', + 'tests.test_forms', ] SILENCED_SYSTEM_CHECKS = ['1_7.W001']
Fixed a deprecation warning by making sure a test app is installed.
py
diff --git a/shinken/util.py b/shinken/util.py index <HASH>..<HASH> 100644 --- a/shinken/util.py +++ b/shinken/util.py @@ -31,9 +31,12 @@ except ImportError: from shinken.macroresolver import MacroResolver #from memoized import memoized - -stdout_encoding = sys.stdout.encoding -safe_stdout = (stdout_encoding == 'UTF-8') +try: + stdout_encoding = sys.stdout.encoding + safe_stdout = (stdout_encoding == 'UTF-8') +except Exception, exp: + print "Encoding detection error", exp + safe_stdout = False #import locale #print locale.getdefaultlocale() #utf8_safe = (locale.getdefaultlocale() == ('en_US','UTF8'))
Fix: nosetest is not happy about sys.stdout and encoding.
py
diff --git a/core/graph.py b/core/graph.py index <HASH>..<HASH> 100755 --- a/core/graph.py +++ b/core/graph.py @@ -103,7 +103,7 @@ def plotFCM(data, channel_names, kind='histogram', ax=None, x = data[channel_names[0]] if len(x): - pHandle = x.hist(ax = ax, **kwargs) + pHandle = ax.hist(x, **kwargs) else: return None
changed pandas hist to matplotlib hist to get references to histogram patches
py
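Axes.hist returns the bin counts, edges, and the patch artists, which pandas' DataFrame.hist does not hand back; a minimal illustration of why the switch gives access to the patches:

import numpy as np
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
x = np.random.randn(1000)

# Axes.hist returns (counts, bin_edges, patches)
n, bins, patches = ax.hist(x, bins=30)

# with patch handles, individual bars can be restyled afterwards
patches[0].set_facecolor('red')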
diff --git a/cumulusci/tasks/bulkdata/load.py b/cumulusci/tasks/bulkdata/load.py index <HASH>..<HASH> 100644 --- a/cumulusci/tasks/bulkdata/load.py +++ b/cumulusci/tasks/bulkdata/load.py @@ -136,10 +136,15 @@ class LoadData(BaseSalesforceApiTask, SqlAlchemyMixin): sql = f"""BEGIN TRANSACTION; UPDATE {table_name} SET {column_name} = '' - WHERE IsPersonAccount = true; + WHERE IsPersonAccount = 'true'; COMMIT; """ - self.session.connection().cursor().executescript(sql) + self.session.connection().connection.cursor().executescript(sql) + self.logger.debug("") + self.logger.debug( + f"Set Account.Name to blank for Person Account records" + ) + self.logger.debug("") mapping["oid_as_pk"] = bool(mapping.get("fields", {}).get("Id")) @@ -323,6 +328,11 @@ COMMIT; lookup_column = getattr(model, key_field) query = query.order_by(lookup_column) + # Filter out non-person account Contact records. + # Contact records for person accounts were already created by the system. + if self._is_person_accounts_enabled and mapping["table"].lower() == "contact": + query = query.filter(text("IsPersonAccount == 'false'")) + return query def _convert(self, value):
load sets Account.Name as blank + only inserts non-person account Contact records; TODO: Update Contact Id table with person account contact IDs
py
diff --git a/glad/lang/c/loader/gl.py b/glad/lang/c/loader/gl.py index <HASH>..<HASH> 100644 --- a/glad/lang/c/loader/gl.py +++ b/glad/lang/c/loader/gl.py @@ -213,12 +213,14 @@ class OpenGLCLoader(BaseLoader): def write_header(self, fobj): fobj.write(_OPENGL_HEADER_START) + written = set() for api, hname, name in [ ('gl', 'gl', 'OpenGL'), ('gles1', 'gl', 'OpenGL ES 1'), ('gles2', 'gl2', 'OpenGL ES 2'), ('gles2', 'gl3', 'OpenGL ES 3') ]: - if api in self.apis: + if api in self.apis and hname not in written: fobj.write(_OPENGL_HEADER_INCLUDE_ERROR.format(hname, name)) + written.add(hname) fobj.write(_OPENGL_HEADER) if not self.disabled and 'gl' in self.apis:
c: Fix issue with headers having GLES1 and OpenGL generated, related #<I>.
py
diff --git a/gui/test_riabclipper.py b/gui/test_riabclipper.py index <HASH>..<HASH> 100644 --- a/gui/test_riabclipper.py +++ b/gui/test_riabclipper.py @@ -16,10 +16,15 @@ __date__ = '20/01/2011' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') - +import unittest +import sys import os + +# Add PARENT directory to path to make test aware of other modules +pardir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) +sys.path.append(pardir) + import numpy -import unittest from qgis.core import (QgsVectorLayer, QgsRasterLayer) @@ -293,6 +298,7 @@ class RiabClipper(unittest.TestCase): raise Exception(msg) # Check None option without keyword datatype == 'density' + R.keywords['datatype'] = 'undefined' A_none = R.get_data(scaling=None) msg = 'Data should not have changed' assert nanallclose(A_native, A_none,
Fixed scaling test that broke when density keywords were changed. It is now robust because it sets the expected keyword rather than relying on those in the test data.
py
diff --git a/salt/states/file.py b/salt/states/file.py index <HASH>..<HASH> 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -3074,7 +3074,8 @@ def append(name, sources=None, source_hashes=None, defaults=None, - context=None): + context=None, + ignore_whitespace=True): ''' Ensure that some text appears at the end of a file. @@ -3177,6 +3178,13 @@ def append(name, context Overrides default context variables passed to the template. + ignore_whitespace + .. versionadded:: 2015.8.4 + + Spaces and Tabs in text are ignored by default, when searching for the + appending content, one space or multiple tabs are the same for salt. + Set this option to ``False`` if you want to change this behavior. + Multi-line example: .. code-block:: yaml @@ -3276,11 +3284,16 @@ def append(name, try: for chunk in text: - if __salt__['file.search']( + if ignore_whitespace and __salt__['file.search']( name, salt.utils.build_whitespace_split_regex(chunk), multiline=True): continue + elif __salt__['file.search']( + name, + chunk, + multiline=True): + continue lines = chunk.splitlines()
Add option in file.append to ignore_whitespace. When appending content to a file, salt will try to search for that content first. During this operation whitespace is ignored. This option allows overriding this behavior. Fixes #<I>.
py
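build_whitespace_split_regex turns literal text into a pattern in which any run of whitespace matches; a simplified, hypothetical re-implementation of the idea (not salt's actual code):

import re

def whitespace_tolerant_regex(text):
    # escape each token and join with \s+, so one space, several
    # spaces, or tabs in the target file all match the same chunk
    return r'\s+'.join(re.escape(tok) for tok in text.split())

pattern = whitespace_tolerant_regex("Hello   world")
assert re.search(pattern, "Hello\t\tworld")
assert re.search(pattern, "Hello world")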
diff --git a/marshmallow/schema.py b/marshmallow/schema.py index <HASH>..<HASH> 100644 --- a/marshmallow/schema.py +++ b/marshmallow/schema.py @@ -589,13 +589,10 @@ class BaseSchema(base.SchemaABC): """Update fields based on the passed in object.""" # if only __init__ param is specified, only return those fields if self.only: - ret = self.__filter_fields(self.only, obj, many=many) - self.__set_field_attrs(ret) - self.fields = ret - return self.fields - - if self.opts.fields: # Return only fields specified in fields option + field_names = self.set_class(self.only) + elif self.opts.fields: + # Return fields specified in fields option field_names = self.set_class(self.opts.fields) elif self.opts.additional: # Return declared fields + additional fields
Schema.py: only respects exclude. I did make an issue a week ago but couldn't resist trying out this pull request.
py
diff --git a/tests/test_stl.py b/tests/test_stl.py index <HASH>..<HASH> 100644 --- a/tests/test_stl.py +++ b/tests/test_stl.py @@ -89,6 +89,25 @@ class STLTests(g.unittest.TestCase): except BaseException: return raise ValueError("Shouldn't export empty scenes!") + + def test_vertex_order(self): + # removing doubles should respect the vertex order + m_raw = g.get_mesh('featuretype.STL', process=False) + m_proc = g.get_mesh('featuretype.STL', process=True) + + verts_raw = g.trimesh.grouping.hashable_rows(m_raw.vertices) + verts_proc = g.trimesh.grouping.hashable_rows(m_proc.vertices) + + # go through all processed verts + # find index in unprocessed mesh + idxs = [] + for vert in verts_proc: + idxs.append(g.np.where(verts_raw == vert)[0][0]) + + # indices should be increasing + assert (g.np.diff(idxs) >= 0).all() + + if __name__ == '__main__':
Add test for STL vertex order
py
diff --git a/src/tests/streamcorpus_pipeline/test_clean_html.py b/src/tests/streamcorpus_pipeline/test_clean_html.py index <HASH>..<HASH> 100644 --- a/src/tests/streamcorpus_pipeline/test_clean_html.py +++ b/src/tests/streamcorpus_pipeline/test_clean_html.py @@ -19,6 +19,8 @@ def test_make_clean_html_nyt(): stable = open(os.path.join(path, 'nytimes-index-clean-stable.html')).read() assert generated == stable + assert '<script' not in generated + def test_make_clean_html(): test_bad_html = '''
asserting that at least in this test data, the lxml.html.clean.Cleaner removes <script> tags
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -14,7 +14,7 @@ long_description = ('Ariane CLIP3 (client python 3) is the python implementation ' + IRC on freenode #ariane.echinopsii') setup(name='ariane_clip3', - version='0.1.2', + version='0.1.3-b01', description='Ariane Python API Library', long_description=long_description, author='Mathilde Ffrench', @@ -22,7 +22,7 @@ setup(name='ariane_clip3', maintainer='Mathilde Ffrench', maintainer_email='[email protected]', url='https://github.com/echinopsii/net.echinopsii.ariane.community.cli.python3.git', - download_url='https://github.com/echinopsii/net.echinopsii.ariane.community.cli.python3.git/tarball/0.1.2', + download_url='https://github.com/echinopsii/net.echinopsii.ariane.community.cli.python3.git/tarball/0.1.3-b01', packages=['ariane_clip3', 'ariane_clip3.rabbitmq', 'ariane_clip3.rest'], license='AGPLv3', install_requires=['requests', 'epika-python3', 'pykka'],
[ACC-<I>] new beta version
py
diff --git a/searx/webapp.py b/searx/webapp.py index <HASH>..<HASH> 100644 --- a/searx/webapp.py +++ b/searx/webapp.py @@ -148,6 +148,14 @@ def stats(): stats = get_engines_stats() return render('stats.html', stats=stats) [email protected]('/robots.txt', methods=['GET']) +def robots(): + return Response("""User-agent: * +Allow: / +Allow: /about +Disallow: /stats +""", mimetype='text/plain') + @app.route('/opensearch.xml', methods=['GET']) def opensearch(): global opensearch_xml
[enh] robots.txt added
py
diff --git a/sievelib/tests/parser.py b/sievelib/tests/parser.py index <HASH>..<HASH> 100644 --- a/sievelib/tests/parser.py +++ b/sievelib/tests/parser.py @@ -49,13 +49,13 @@ class SieveTest(unittest.TestCase): class AdditionalCommands(SieveTest): def test_add_command(self): + self.assertRaises(sievelib.commands.UnknownCommand, sievelib.commands.get_command_instance, 'mytest') sievelib.commands.add_commands(MytestCommand) sievelib.commands.get_command_instance('mytest') - self.assertRaises(sievelib.commands.UnknownCommand, sievelib.commands.get_command_instance, 'unknowncommand') self.compilation_ok(""" mytest :testtag 10 ["[email protected]"]; """) - + class ValidSyntaxes(SieveTest):
changed test_add_command to check for UnknownCommand before injection
py
diff --git a/allennlp/common/configuration.py b/allennlp/common/configuration.py index <HASH>..<HASH> 100644 --- a/allennlp/common/configuration.py +++ b/allennlp/common/configuration.py @@ -478,7 +478,7 @@ VOCAB_CONFIG: Config = Config([ default_value=False, comment="whether to extend the existing vocabulary (if you specified one)"), ConfigItem(name="min_count", - annotation=int, + annotation=Dict[str, int], default_value=None, comment="only include tokens that occur at least this many times"), ConfigItem(name="max_vocab_size",
fix type in vocab config (#<I>)
py
diff --git a/client/main.py b/client/main.py index <HASH>..<HASH> 100644 --- a/client/main.py +++ b/client/main.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python +# -*- coding: utf-8-*- import yaml import sys import speaker
Add shebang and PEP <I> source encoding to main.py
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -3,9 +3,6 @@ import os import sys -import requests -from requests.compat import is_py2 - try: from setuptools import setup except ImportError: @@ -19,7 +16,7 @@ os.environ['PYTHONDONTWRITEBYTECODE'] = '1' setup( name='kippt', - version="1.0.0", + version="1.0.2", description='Kippt.com API wrapper for Python', long_description=open('README').read(), author='TJ (Thomas) Biddle', @@ -27,6 +24,7 @@ setup( url='https://github.com/thomasbiddle/Kippt-Python-Wrapper', packages=['kippt',], package_data={'': ['LICENSE']}, + install_requires=['requests'], include_package_data=True, license='wtfpl', )
porting to python3; it looks like we just needed to ensure requests was installed
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -10,7 +10,7 @@ install_requires = [ "python-dateutil>=2.2", "pytz>=2014.2", ] -if sys.version_info.major < 3: +if sys.version_info < (3, 0): install_requires.append("dnspython>=1.11.1") else: install_requires.append("dnspython3>=1.11.1")
<I> needs to die.
py
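The tuple comparison is the portable spelling: sys.version_info only gained named fields such as .major in Python 2.7, while comparing it to a plain tuple works on every version:

import sys

# works everywhere, including 2.6, where sys.version_info.major
# would raise AttributeError
if sys.version_info < (3, 0):
    requires = ['dnspython>=1.11.1']
else:
    requires = ['dnspython3>=1.11.1']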
diff --git a/mir_eval/beat.py b/mir_eval/beat.py index <HASH>..<HASH> 100644 --- a/mir_eval/beat.py +++ b/mir_eval/beat.py @@ -29,7 +29,7 @@ def trim_beats(beats, min_beat_time=5.): Trimmed beat array. ''' # Remove beats before min_beat_time - return beats[beats > min_beat_time] + return beats[beats >= min_beat_time] def validate(metric): '''Decorator which checks that the input annotations to a metric
Should not be removing beats exactly at the boundary to be consistent with docstring and beat evaluation toolbox
py
diff --git a/pyani/anim.py b/pyani/anim.py index <HASH>..<HASH> 100644 --- a/pyani/anim.py +++ b/pyani/anim.py @@ -236,7 +236,10 @@ def construct_nucmer_cmdline( fname1, fname2 = Path(fname1), Path(fname2) # Compile commands - # Nested output folders to avoid N^2 scaling in files-per-folder: + # Nested output folders to avoid N^2 scaling in files-per-folder + # Create folders incrementally (want an error if outdir does not exist) + outsubdir = outdir / pyani_config.ALIGNDIR["ANIm"] + outsubdir.mkdir(exist_ok=True) outsubdir = outdir / pyani_config.ALIGNDIR["ANIm"] / fname1.stem outsubdir.mkdir(exist_ok=True) outprefix = outsubdir / f"{fname1.stem}_vs_{fname2.stem}"
May have to make two levels of output dir
py
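Creating the nested folders one level at a time, instead of a single mkdir(parents=True), preserves the failure mode the comment asks for: a missing outdir still raises. A sketch (directory names hypothetical):

from pathlib import Path

outdir = Path('results')  # expected to exist already

# exist_ok=True tolerates re-runs, but a missing 'results' still
# raises FileNotFoundError because parents=False by default
(outdir / 'nucmer_output').mkdir(exist_ok=True)
(outdir / 'nucmer_output' / 'genome1').mkdir(exist_ok=True)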
diff --git a/tests/pytests/integration/states/test_file.py b/tests/pytests/integration/states/test_file.py index <HASH>..<HASH> 100644 --- a/tests/pytests/integration/states/test_file.py +++ b/tests/pytests/integration/states/test_file.py @@ -1146,6 +1146,7 @@ def test_recurse( assert test_tempdir.joinpath(_dir, str(_file)).is_file() [email protected]_on_windows def test_recurse_keep_symlinks_in_fileserver_root( salt_master, salt_call_cli, @@ -1199,6 +1200,7 @@ def test_recurse_keep_symlinks_in_fileserver_root( assert target_path.joinpath("test").is_symlink() [email protected]_on_windows def test_recurse_keep_symlinks_outside_fileserver_root( salt_secondary_minion, salt_secondary_master,
skip tests on Windows since it does not support symlinks
py
diff --git a/gns3server/handlers/api/controller/symbol_handler.py b/gns3server/handlers/api/controller/symbol_handler.py index <HASH>..<HASH> 100644 --- a/gns3server/handlers/api/controller/symbol_handler.py +++ b/gns3server/handlers/api/controller/symbol_handler.py @@ -49,7 +49,7 @@ class SymbolHandler: controller = Controller.instance() try: yield from response.file(controller.symbols.get_path(request.match_info["symbol_id"])) - except KeyError: + except (KeyError, FileNotFoundError): response.set_status(404) @Route.post(
Catch FileNotFoundError for builtin symbols if they are erased from disk. Fix #<I>
py
diff --git a/pyflunearyou/__version__.py b/pyflunearyou/__version__.py index <HASH>..<HASH> 100644 --- a/pyflunearyou/__version__.py +++ b/pyflunearyou/__version__.py @@ -1,2 +1,2 @@ """Define a version constant.""" -__version__ = '0.0.2' +__version__ = '0.1.0'
Bumped version to <I>
py
diff --git a/src/sentry_plugins/slack/plugin.py b/src/sentry_plugins/slack/plugin.py index <HASH>..<HASH> 100644 --- a/src/sentry_plugins/slack/plugin.py +++ b/src/sentry_plugins/slack/plugin.py @@ -1,6 +1,5 @@ from __future__ import absolute_import -from django.core.urlresolvers import reverse from sentry import http, tagstore from sentry.plugins.bases import notify from sentry.utils import json @@ -176,10 +175,9 @@ class SlackPlugin(CorePluginMixin, notify.NotificationPlugin): if self.get_option('include_rules', project): rules = [] for rule in notification.rules: - rule_link = reverse( - 'sentry-edit-project-rule', - args=[group.organization.slug, project.slug, rule.id] - ) + rule_link = '/%s/%s/settings/alerts/rules/%s/' % ( + group.organization.slug, project.slug, rule.id) + # Make sure it's an absolute uri since we're sending this # outside of Sentry into Slack rule_link = absolute_uri(rule_link)
fix: Update URL for Slack plugin (#<I>) Not a Django route anymore
py
diff --git a/law/workflow/remote.py b/law/workflow/remote.py index <HASH>..<HASH> 100644 --- a/law/workflow/remote.py +++ b/law/workflow/remote.py @@ -429,15 +429,17 @@ class BaseRemoteWorkflowProxy(BaseWorkflowProxy): n_parallel = sys.maxsize if task.parallel_jobs < 0 else task.parallel_jobs new_jobs = OrderedDict() for job_num, branches in list(self.submission_data.waiting_jobs.items()): - if n_active + len(new_jobs) >= n_parallel: - break - - # remove job from the waiting list - del self.submission_data.waiting_jobs[job_num] - if skip_job(job_num, branches): + # remove jobs that don't need to be submitted + del self.submission_data.waiting_jobs[job_num] continue + # stop for now when n_parllel jobs are already running + if n_active + len(new_jobs) >= n_parallel: + continue + + # remove jobs that are going to be submitted from the waiting list + del self.submission_data.waiting_jobs[job_num] new_jobs[job_num] = sorted(branches) # add new jobs to the jobs to submit, maybe also shuffle
Continue scanning and pruning the whole waiting list when submitting (#<I>)
py
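The key change is swapping an early `break` for `continue` once the parallel-job cap is reached, so skippable jobs are still pruned from the rest of the waiting list. A standalone sketch under assumed names (`waiting`, `skip_job`, `cap` are illustrative, not law's API):

```python
# Scan-and-prune sketch: skippable jobs are always removed, but
# submission stops once the cap is hit -- without aborting the scan.
def collect_submittable(waiting, skip_job, n_active, cap):
    new_jobs = {}
    for job_num, branches in list(waiting.items()):
        if skip_job(job_num, branches):
            del waiting[job_num]        # prune regardless of the cap
            continue
        if n_active + len(new_jobs) >= cap:
            continue                    # keep scanning, just don't submit
        del waiting[job_num]
        new_jobs[job_num] = sorted(branches)
    return new_jobs


waiting = {1: [3, 1], 2: [2], 3: [5]}
print(collect_submittable(waiting, lambda n, b: n == 2, n_active=0, cap=1))
# {1: [1, 3]}; job 2 was pruned, job 3 remains in the waiting list
```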
diff --git a/eli5/sklearn/explain_weights.py b/eli5/sklearn/explain_weights.py index <HASH>..<HASH> 100644 --- a/eli5/sklearn/explain_weights.py +++ b/eli5/sklearn/explain_weights.py @@ -191,7 +191,7 @@ def explain_linear_classifier_weights(clf, vec=None, top=_TOP, target_names=None @explain_weights.register(ExtraTreesClassifier) @explain_weights.register(GradientBoostingClassifier) @explain_weights.register(AdaBoostClassifier) -def explain_rf_feature_importance(clf, vec, top=_TOP, target_names=None, +def explain_rf_feature_importance(clf, vec=None, top=_TOP, target_names=None, feature_names=None, coef_scale=None): """ Return an explanation of a tree-based ensemble classifier in the
vectorizer is not required for explain_rf_feature_importance
py
diff --git a/spyderlib/widgets/ipython.py b/spyderlib/widgets/ipython.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/ipython.py +++ b/spyderlib/widgets/ipython.py @@ -39,6 +39,7 @@ class IPythonControlWidget(QTextEdit, mixins.BaseEditMixin, mixins.TracebackLinksMixin.__init__(self) mixins.InspectObjectMixin.__init__(self) self.calltips = False # To not use Spyder calltips + self.found_results = [] def _key_question(self, text): """Action for '?'""" @@ -58,7 +59,7 @@ class IPythonControlWidget(QTextEdit, mixins.BaseEditMixin, self._key_question(text) else: # Let the parent widget handle the key press event - QTextEdit.keyPressEvent(self, event) + QTextEdit.keyPressEvent(self, event) class IPythonPageControlWidget(QTextEdit, mixins.BaseEditMixin): @@ -70,6 +71,7 @@ class IPythonPageControlWidget(QTextEdit, mixins.BaseEditMixin): def __init__(self, parent=None): QTextEdit.__init__(self, parent) mixins.BaseEditMixin.__init__(self) + self.found_results = [] class SpyderIPythonWidget(RichIPythonWidget):
IPython Console/Bug: Trying to find more than one result of a given text was failing - a traceback was generated complaining about a missing found_results attribute. Adding it solved the problem.
py
diff --git a/superset/views/core.py b/superset/views/core.py index <HASH>..<HASH> 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -2049,9 +2049,9 @@ class Superset(BaseSupersetView): pass dashboard(dashboard_id=dash.id) - dash_edit_perm = check_ownership(dash, raise_if_false=False) - dash_save_perm = \ - dash_edit_perm and security_manager.can_access('can_save_dash', 'Superset') + dash_edit_perm = check_ownership(dash, raise_if_false=False) and \ + security_manager.can_access('can_save_dash', 'Superset') + dash_save_perm = security_manager.can_access('can_save_dash', 'Superset') superset_can_explore = security_manager.can_access('can_explore', 'Superset') slice_can_edit = security_manager.can_access('can_edit', 'SliceModelView')
[Dashboard] Allow Superset Alpha, Gamma users to save dashboard as a copy (#<I>)
py
diff --git a/_pydev_runfiles/pydev_runfiles_nose.py b/_pydev_runfiles/pydev_runfiles_nose.py index <HASH>..<HASH> 100644 --- a/_pydev_runfiles/pydev_runfiles_nose.py +++ b/_pydev_runfiles/pydev_runfiles_nose.py @@ -127,6 +127,8 @@ class PydevPlugin(Plugin): from io import StringIO s = StringIO() etype, value, tb = err + if isinstance(value, str): + return value import traceback;traceback.print_exception(etype, value, tb, file=s) return s.getvalue() return err
Wrong traceback after failing unittest with nosetest and Python3. Fixes PyDev-<I>
py
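A hedged sketch of the pattern the PyDev fix applies: render an `(etype, value, tb)` tuple to a string, but pass through the case where some runner already handed back a plain message string (function name is illustrative):

```python
# Format an (etype, value, tb) tuple to text, guarding against value
# already being a plain string under Python 3.
import sys
import traceback
from io import StringIO


def format_err(err):
    etype, value, tb = err
    if isinstance(value, str):     # some runners hand back a bare message
        return value
    s = StringIO()
    traceback.print_exception(etype, value, tb, file=s)
    return s.getvalue()


try:
    1 / 0
except ZeroDivisionError:
    print(format_err(sys.exc_info()))
```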
diff --git a/flowcraft/generator/engine.py b/flowcraft/generator/engine.py index <HASH>..<HASH> 100644 --- a/flowcraft/generator/engine.py +++ b/flowcraft/generator/engine.py @@ -103,7 +103,6 @@ process_map = { "trimmomatic": readsqc.Trimmomatic, "true_coverage": readsqc.TrueCoverage, "viral_assembly": assembly.ViralAssembly, - "abyss": assembly.Abyss } """
removed duplicated abyss in process_map
py
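Worth noting why the duplicate entry was harmless-looking but still wrong: a repeated key in a Python dict literal does not raise, the later entry silently overwrites the earlier one, so the duplicate is dead weight at best and a masked bug at worst:

```python
# Duplicate keys in a dict literal don't raise; the later entry silently
# overwrites the earlier one.
process_map = {
    "abyss": "assembly.Abyss",
    "abyss": "assembly.Abyss",   # overwrites the entry above, no error
}
print(len(process_map))          # 1
```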
diff --git a/django_afip/models.py b/django_afip/models.py index <HASH>..<HASH> 100644 --- a/django_afip/models.py +++ b/django_afip/models.py @@ -126,7 +126,12 @@ class Receipt(models.Model): 'Receipt', ) - # optionals + # Not implemented: optionals + + # These two values are stored in the receipt's batch. However, before the + # receipt is assigned into a batch, this value should be used. + receipt_type = models.ForeignKey(ReceiptType) + sales_point = models.ForeignKey(PointOfSales) @property def total(self):
Avoid receipts being orphans before they have a batch
py
diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index <HASH>..<HASH> 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -17,6 +17,9 @@ class DatatypeUnitTests(object): def op(self, dtype): raise NotImplementedError + def check_op_output(self, op): + raise NotImplementedError + def test_new_type_is_clean(self): newtype = self.dtype(self.bucket, 'key') self.assertIsNone(newtype.to_op())
Stub out check_op_output as suggested
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -7,11 +7,16 @@ except ImportError: from setuptools import setup, find_packages # NOQA from setuptools.command.test import test as TestCommand import os.path +import warnings def read_file(filepath): - with open(os.path.join(os.path.dirname(__file__), filepath)) as f: - return f.read() + try: + with open(os.path.join(os.path.dirname(__file__), filepath)) as f: + return f.read() + except IOError: + warnings.warn('Could not found {0}'.format(filepath), RuntimeWarning) + return '' class PyTest(TestCommand):
Workaround for when README.rst can't be found
py
diff --git a/filer/utils/files.py b/filer/utils/files.py index <HASH>..<HASH> 100644 --- a/filer/utils/files.py +++ b/filer/utils/files.py @@ -16,7 +16,14 @@ def handle_upload(request): # the file is stored raw in the request is_raw = True filename = request.GET.get('qqfile', False) or request.GET.get('filename', False) or '' - upload = SimpleUploadedFile(name=filename, content=request.raw_post_data) + if hasattr(request, 'body'): + # raw_post_data was depreciated in django 1.4: + # https://docs.djangoproject.com/en/dev/releases/1.4/#httprequest-raw-post-data-renamed-to-httprequest-body + data = request.body + else: + # fallback for django 1.3 + data = request.raw_post_data + upload = SimpleUploadedFile(name=filename, content=data) else: if len(request.FILES) == 1: # FILES is a dictionary in Django but Ajax Upload gives the uploaded file an
Fix #<I>: backward compatibility with django-<I>, while using the new method on newer versions
py
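The fix is an instance of `hasattr`-based feature detection: prefer the new attribute name, fall back to the old one. A self-contained sketch (the request classes below are stand-ins, not Django's):

```python
# Feature-detection sketch: try the new attribute first, fall back to
# the deprecated one. OldRequest/NewRequest are illustrative stand-ins.
class OldRequest:
    raw_post_data = b"payload"       # Django 1.3 spelling


class NewRequest:
    body = b"payload"                # Django >= 1.4 spelling


def get_body(request):
    if hasattr(request, "body"):
        return request.body
    return request.raw_post_data     # fallback for the old API


print(get_body(OldRequest()), get_body(NewRequest()))
```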
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ from aldryn_apphooks_config import __version__ REQUIREMENTS = [ 'django-appdata>=0.2.0', 'django-cms>=3.4.5' -], +] CLASSIFIERS = [ @@ -24,7 +24,7 @@ CLASSIFIERS = [ 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', -], +] setup(
Fixed typo in setup.py file
py
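The "typo" here is subtle enough to deserve a demonstration: a trailing comma after the closing bracket wraps the list in a one-element tuple, so setuptools would have received a tuple containing a list rather than the list itself:

```python
# A stray comma after the closing bracket turns the value into a tuple,
# so setuptools gets (['django-appdata>=0.2.0'],) instead of a list.
REQUIREMENTS_BROKEN = [
    "django-appdata>=0.2.0",
],
REQUIREMENTS_FIXED = [
    "django-appdata>=0.2.0",
]

print(type(REQUIREMENTS_BROKEN))   # <class 'tuple'>
print(type(REQUIREMENTS_FIXED))    # <class 'list'>
```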
diff --git a/pyads/ads.py b/pyads/ads.py index <HASH>..<HASH> 100644 --- a/pyads/ads.py +++ b/pyads/ads.py @@ -1041,7 +1041,7 @@ class Connection(object): # read only until null-termination character value = bytearray(data).split(b"\0", 1)[0].decode("utf-8") - elif issubclass(plc_datatype, Structure): + elif plc_datatype is not None and issubclass(plc_datatype, Structure): value = plc_datatype() fit_size = min(data_size, sizeof(value)) memmove(addressof(value), addressof(data), fit_size)
Fix notification decorator when type is None. The default value of the type argument is None, so this value should also be supported in the actual conversion; issubclass() throws a TypeError when the argument is None. Also, this offers a simple option to just get the raw value as a byte array.
py
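The guard exists because `issubclass()` raises `TypeError` when its first argument is not a class, so `None` must be filtered out before the check:

```python
# issubclass() raises TypeError for a non-class first argument, so a
# plc_datatype of None must be short-circuited away before the check.
from ctypes import Structure

plc_datatype = None
try:
    issubclass(plc_datatype, Structure)
except TypeError as exc:
    print("unguarded check fails:", exc)

# The guarded form from the fix is safe:
if plc_datatype is not None and issubclass(plc_datatype, Structure):
    print("decode as Structure")
else:
    print("fall back to raw bytes")
```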
diff --git a/spyder/config/user.py b/spyder/config/user.py index <HASH>..<HASH> 100644 --- a/spyder/config/user.py +++ b/spyder/config/user.py @@ -19,7 +19,6 @@ import re import os.path as osp import shutil import time -import sys # Local imports from spyder.config.base import (get_conf_path, get_home_dir,
Deleted unused sys import
py
diff --git a/newsplease/__init__.py b/newsplease/__init__.py index <HASH>..<HASH> 100644 --- a/newsplease/__init__.py +++ b/newsplease/__init__.py @@ -39,7 +39,11 @@ class NewsPlease: # assume utf-8 encoding = 'utf-8' - html = raw_stream.decode(encoding, errors=decode_errors) + try: + html = raw_stream.decode(encoding, errors=decode_errors) + except LookupError: + # non-existent encoding: fallback to utf-8 + html = raw_stream.decode('utf-8', errors=decode_errors) url = warc_record.rec_headers.get_header('WARC-Target-URI') download_date = warc_record.rec_headers.get_header('WARC-Date') article = NewsPlease.from_html(html, url=url, download_date=download_date)
Fallback to utf-8 when document gives unknown encoding
py
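The detail that makes this fix correct: an unknown codec name raises `LookupError`, not `UnicodeDecodeError`, so that is what the fallback must catch:

```python
# bytes.decode() raises LookupError for a codec Python doesn't know,
# which is distinct from UnicodeDecodeError for malformed input.
raw = "café".encode("utf-8")


def decode_with_fallback(raw_stream, encoding):
    try:
        return raw_stream.decode(encoding, errors="replace")
    except LookupError:   # non-existent encoding declared by the document
        return raw_stream.decode("utf-8", errors="replace")


print(decode_with_fallback(raw, "totally-bogus-charset"))  # café
```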
diff --git a/timeside/core/tools/test_samples.py b/timeside/core/tools/test_samples.py index <HASH>..<HASH> 100644 --- a/timeside/core/tools/test_samples.py +++ b/timeside/core/tools/test_samples.py @@ -196,6 +196,9 @@ class gst_BuildSample(object): #Gst.Element.link_many(*pipe_elements) numpy_src.appsrc.link(converter) converter.link(encoder_muxer[0]) + while len(encoder_muxer) > 1: + next_el = encoder_muxer.pop(0) + next_el.link(encoder_muxer[0]) encoder_muxer[0].link(filesink) def _on_new_pad(self, source, pad, target_pad):
[core] connect all pipeline elements in test_samples.py
py
diff --git a/test/test_plyfile.py b/test/test_plyfile.py index <HASH>..<HASH> 100644 --- a/test/test_plyfile.py +++ b/test/test_plyfile.py @@ -87,6 +87,11 @@ def verify(ply0, ply1): verify_1d(prop0, prop1) + verify_comments(el0[k].comments, el1[k].comments) + + verify_comments(ply0.comments, ply1.comments) + verify_comments(ply0.obj_info, ply1.obj_info) + def verify_1d(prop0, prop1): ''' @@ -111,6 +116,16 @@ def verify_1d(prop0, prop1): assert (prop0 == prop1).all() +def verify_comments(comments0, comments1): + ''' + Verify that comment lists are identical. + + ''' + assert len(comments0) == len(comments1) + for (comment0, comment1) in zip(comments0, comments1): + assert comment0 == comment1 + + def write_read(ply, tmpdir, name='test.ply'): ''' Utility: serialize/deserialize a PlyData instance through a
Compare comments and obj_info in unit tests
py
diff --git a/tests/basics/ordereddict1.py b/tests/basics/ordereddict1.py index <HASH>..<HASH> 100644 --- a/tests/basics/ordereddict1.py +++ b/tests/basics/ordereddict1.py @@ -9,8 +9,19 @@ except ImportError: sys.exit() d = OrderedDict([(10, 20), ("b", 100), (1, 2)]) +print(len(d)) print(list(d.keys())) print(list(d.values())) del d["b"] +print(len(d)) +print(list(d.keys())) +print(list(d.values())) + +# access remaining elements after deleting +print(d[10], d[1]) + +# add an element after deleting +d["abc"] = 123 +print(len(d)) print(list(d.keys())) print(list(d.values()))
tests/basics: Add further tests for OrderedDict.
py
diff --git a/faker/providers/date_time/__init__.py b/faker/providers/date_time/__init__.py index <HASH>..<HASH> 100644 --- a/faker/providers/date_time/__init__.py +++ b/faker/providers/date_time/__init__.py @@ -360,7 +360,7 @@ class Provider(BaseProvider): start_date = cls._parse_date_time(start_date, tzinfo=tzinfo) end_date = cls._parse_date_time(end_date, tzinfo=tzinfo) timestamp = random.randint(start_date, end_date) - return datetime.fromtimestamp(timestamp, tzinfo) + return datetime(1970, 1, 1) + timedelta(seconds=timestamp) @classmethod def future_datetime(cls, end_date='+30d', tzinfo=None):
Update __init__.py: using a timestamp doesn't allow you to generate a date before <I>. In fact, you can even put in (-<I>y, -<I>y) as parameters and get a runtime error.
py
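The replacement works because `datetime.fromtimestamp()` delegates to the platform C library and can fail for negative (pre-1970) timestamps, while epoch-plus-timedelta arithmetic handles any value in datetime's range:

```python
# Epoch arithmetic sidesteps platform limits of fromtimestamp(), so
# negative timestamps (dates before 1970) work everywhere.
from datetime import datetime, timedelta

timestamp = -86400                        # one day before the epoch
dt = datetime(1970, 1, 1) + timedelta(seconds=timestamp)
print(dt)                                 # 1969-12-31 00:00:00

# datetime.fromtimestamp(timestamp) may raise OSError for values like
# this on some platforms (notably Windows).
```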
diff --git a/tests/test_utils.py b/tests/test_utils.py index <HASH>..<HASH> 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -162,10 +162,10 @@ class TestCache(object): @pytest.fixture def key(self): - if six.PY3: - return 'tests.test_utils.<function TestCache.fn.<locals>.fn ' - else: + if six.PY2: return 'tests.test_utils.<function fn ' + else: + return 'tests.test_utils.<function TestCache.fn.<locals>.fn ' def test_with_blank_cache(self, shelve, fn, key): assert shelve == {}
#N/A: Replace `PY3` checks with `PY2` checks
py
diff --git a/chatterbot/__init__.py b/chatterbot/__init__.py index <HASH>..<HASH> 100644 --- a/chatterbot/__init__.py +++ b/chatterbot/__init__.py @@ -3,7 +3,7 @@ ChatterBot is a machine learning, conversational dialog engine. """ from .chatterbot import ChatBot -__version__ = '1.0.0a2' +__version__ = '1.0.0a3' __author__ = 'Gunther Cox' __email__ = '[email protected]' __url__ = 'https://github.com/gunthercox/ChatterBot'
Increment package version to <I>a3
py
diff --git a/salt/modules/ssh.py b/salt/modules/ssh.py index <HASH>..<HASH> 100644 --- a/salt/modules/ssh.py +++ b/salt/modules/ssh.py @@ -1133,7 +1133,7 @@ def user_keys(user=None, pubfile=None, prvfile=None): if os.path.exists(fn_): try: with salt.utils.fopen(fn_, 'r') as _fh: - keys[u][keyname] = ''.join(_fh.readlines()) + keys[u][keyname] = ''.join(_fh.readlines()).strip() except (IOError, OSError): pass
Remove trailing newline in ssh.user_keys() (#<I>)
py
diff --git a/examples/read-ow.py b/examples/read-ow.py index <HASH>..<HASH> 100644 --- a/examples/read-ow.py +++ b/examples/read-ow.py @@ -79,6 +79,7 @@ class OWExample: def _data_updated(self, mem): print('Updated id={}'.format(mem.id)) + print('\tAddr : {}'.format(mem.addr)) print('\tType : {}'.format(mem.type)) print('\tSize : {}'.format(mem.size)) print('\tValid : {}'.format(mem.valid))
Added printing address of 1-wire memory to example (closes #<I>)
py
diff --git a/bcloud/UploadPage.py b/bcloud/UploadPage.py index <HASH>..<HASH> 100644 --- a/bcloud/UploadPage.py +++ b/bcloud/UploadPage.py @@ -734,6 +734,7 @@ class UploadPage(Gtk.Box): tree_iters.append(self.liststore.get_iter(row.path)) for tree_iter in tree_iters: if tree_iter: + self.remove_task_db(self.liststore[tree_iter][FID_COL]) self.liststore.remove(tree_iter) def on_open_folder_button_clicked(self, button):
Fixed: remove task from upload db when remove-button is clicked
py
diff --git a/zeno/test/testing_utils.py b/zeno/test/testing_utils.py index <HASH>..<HASH> 100644 --- a/zeno/test/testing_utils.py +++ b/zeno/test/testing_utils.py @@ -3,6 +3,8 @@ import os import sys import fcntl +import tempfile + from ioflo.base.consoling import getConsole from zeno.common.stacked import HA @@ -85,9 +87,10 @@ class PortDispenser: port numbers. It leverages the filesystem lock mechanism to ensure there are no overlaps. """ - def __init__(self, ip: str): + def __init__(self, ip: str, filename: str=None): self.ip = ip - self.FILE = "portmutex3.{}.txt".format(ip) + self.FILE = filename or os.path.join(tempfile.gettempdir(), + 'zeno-portmutex.{}.txt'.format(ip)) self.minPort = 6000 self.maxPort = 9999 self.initFile()
PortDispenser now uses a system-supplied temporary directory and an intelligent filename when one is not provided to it.
py
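A minimal sketch of the fixed default: build the mutex path inside `tempfile.gettempdir()` rather than the current working directory (the helper name below is illustrative):

```python
# Put the mutex file in the system temp directory instead of whatever
# the current working directory happens to be.
import os
import tempfile


def mutex_path(ip, filename=None):
    return filename or os.path.join(
        tempfile.gettempdir(), "zeno-portmutex.{}.txt".format(ip))


print(mutex_path("127.0.0.1"))   # e.g. /tmp/zeno-portmutex.127.0.0.1.txt
```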
diff --git a/spinoff/util/logging.py b/spinoff/util/logging.py index <HASH>..<HASH> 100644 --- a/spinoff/util/logging.py +++ b/spinoff/util/logging.py @@ -233,8 +233,8 @@ def _do_write(level, *args, **kwargs): elif caller_fn: caller_fn._r_logstring = logstring - logname = getattr(caller, '_r_logname', None) - if not logname: + logname = getattr(caller, '_r_logname', None) if caller else '' + if logname is None: logname = CYAN + get_logname(caller) + RESET_COLOR if not hasattr(caller, '__slots__'): caller._r_logname = logname
Some more graceful handling of logging with only .pyc available
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,7 +12,7 @@ with open("rb/__init__.py", "rb") as f: ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1)) ) -install_requires = ["redis>=2.6,<3.4"] +install_requires = ["redis>=2.6,<3.5,!=3.4.0"] # override django version in requirements file if DJANGO_VERSION is set REDIS_VERSION = os.environ.get('REDIS_VERSION')
upgrade(redis-py): Allow using redis-py@<I> (#<I>) `redis-py@<I>` has some important concurrency fixes around pool management and doesn't have any breaking changes. That said `<I>` has a bug that might potentially be breaking. So this change relaxes the version requirement but excludes the unsafe `<I>` version. See the relevant changes from <I> to <I>: <URL>
py
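The requirement string combines a version range with a `!=` exclusion. The `packaging` library (used here purely for illustration; the diff itself doesn't touch it, though pip vendors it internally) shows how such a specifier evaluates:

```python
# How a pip-style specifier with an exclusion evaluates; `packaging`
# is an illustrative choice, not something the diff depends on.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=2.6,<3.5,!=3.4.0")
for version in ["2.6", "3.3.11", "3.4.0", "3.4.1", "3.5"]:
    print(version, "allowed" if spec.contains(version) else "excluded")
# 3.4.0 is excluded while the rest of the 3.4.x line stays allowed.
```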
diff --git a/openxc/sources/usb.py b/openxc/sources/usb.py index <HASH>..<HASH> 100644 --- a/openxc/sources/usb.py +++ b/openxc/sources/usb.py @@ -49,7 +49,7 @@ class UsbDataSource(BytestreamDataSource): if not self.device: raise DataSourceError("Couldn't find a USB product 0x%x from vendor 0x%x" - % self.product_id, self.vendor_id) + % (self.product_id, self.vendor_id)) self.device.set_configuration() self.in_endpoint = self._connect_in_endpoint(self.device)
Add missing parens to log statement.
py
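Why the missing parentheses mattered: `%` binds only to the first value, so a format string with two placeholders receives one argument and raises `TypeError` instead of producing the intended error message:

```python
# Without parentheses, "%" binds to the first value only; a two-
# placeholder format string then gets one argument and raises TypeError.
product_id, vendor_id = 0xdc, 0x1bc4

try:
    msg = "product 0x%x from vendor 0x%x" % product_id, vendor_id
except TypeError as exc:
    print("broken:", exc)        # not enough arguments for format string

msg = "product 0x%x from vendor 0x%x" % (product_id, vendor_id)
print("fixed:", msg)             # product 0xdc from vendor 0x1bc4
```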
diff --git a/salt/utils/aws.py b/salt/utils/aws.py index <HASH>..<HASH> 100644 --- a/salt/utils/aws.py +++ b/salt/utils/aws.py @@ -24,7 +24,6 @@ import re # Import Salt libs import salt.utils.xmlutil as xml from salt._compat import ElementTree as ET -import salt.ext.six as six # Import 3rd-party libs try:
salt/utils/aws.py: Removed salt.ext.six import as the code that used it got removed.
py
diff --git a/src/feat/database/tools.py b/src/feat/database/tools.py index <HASH>..<HASH> 100644 --- a/src/feat/database/tools.py +++ b/src/feat/database/tools.py @@ -99,7 +99,7 @@ def push_initial_data(connection, overwrite=False, push_design_docs=True): for name in set(a.keys()).intersection(set(b.keys())): if a[name] != b[name]: - diffs[what] = (a[name], b[name]) + diffs[what][name] = (a[name], b[name]) def strcode(x): if not x:
Fix feat-dbload reporting differences in design documents filter functions.
py
diff --git a/fedmsg/text/__init__.py b/fedmsg/text/__init__.py index <HASH>..<HASH> 100644 --- a/fedmsg/text/__init__.py +++ b/fedmsg/text/__init__.py @@ -65,6 +65,7 @@ from fedmsg.text.default import DefaultProcessor class ProcessorsNotInitialized(Exception): def __iter__(self): raise self + __len__ = __iter__ processors = ProcessorsNotInitialized("You must first call " "fedmsg.text.make_processors(**config)")
Add __len__ to the ProcessorsNotInitialized exception
py
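The sentinel pattern here is worth a sketch: the placeholder is an exception instance that raises itself when touched, and aliasing `__len__ = __iter__` extends that trap to `len()` and truth-testing, which fall back to `__len__`:

```python
# Sentinel sketch: an exception instance that raises itself when code
# tries to iterate it or take its length before initialization.
class ProcessorsNotInitialized(Exception):
    def __iter__(self):
        raise self
    __len__ = __iter__


processors = ProcessorsNotInitialized(
    "You must first call make_processors(**config)")

try:
    if not processors:   # bool() falls back to __len__, which raises
        pass
except ProcessorsNotInitialized as exc:
    print("caught:", exc)
```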
diff --git a/msrest/serialization.py b/msrest/serialization.py index <HASH>..<HASH> 100644 --- a/msrest/serialization.py +++ b/msrest/serialization.py @@ -129,9 +129,11 @@ class Model(object): Remove the polymorphic key from the initial data. """ for subtype_key in cls.__dict__.get('_subtype_map', {}).keys(): - response_key = _decode_attribute_map_key(cls._attribute_map[subtype_key]['key']) - if response_key in response: - subtype_value = response.pop(response_key) + subtype_value = None + + rest_api_response_key = _decode_attribute_map_key(cls._attribute_map[subtype_key]['key']) + subtype_value = response.pop(rest_api_response_key, None) or response.pop(subtype_key, None) + if subtype_value: flatten_mapping_type = cls._flatten_subtype(subtype_key, objects) return objects[flatten_mapping_type[subtype_value]] return cls
Fix serialisation from dict with escape
py
diff --git a/tornado/web.py b/tornado/web.py index <HASH>..<HASH> 100644 --- a/tornado/web.py +++ b/tornado/web.py @@ -70,6 +70,7 @@ import stat import sys import time import tornado +import traceback import types import urllib import urlparse @@ -663,11 +664,16 @@ class RequestHandler(object): If this error was caused by an uncaught exception, the exception object can be found in kwargs e.g. kwargs['exception'] """ - return "<html><title>%(code)d: %(message)s</title>" \ - "<body>%(code)d: %(message)s</body></html>" % { - "code": status_code, - "message": httplib.responses[status_code], - } + if self.settings.get("debug"): + # in debug mode, try to send a traceback + self.set_header('Content-Type', 'text/plain') + return traceback.format_exc() + else: + return "<html><title>%(code)d: %(message)s</title>" \ + "<body>%(code)d: %(message)s</body></html>" % { + "code": status_code, + "message": httplib.responses[status_code], + } @property def locale(self):
add text tracebacks on <I>s when in debug mode
py
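A framework-free sketch of the same idea, under assumed names (no Tornado here): while an exception is being handled, `traceback.format_exc()` renders it as text, which the debug branch returns instead of the generic error page:

```python
# Debug-mode error body: plain-text traceback while handling an
# exception, else the generic HTML error page. Names are illustrative.
import traceback


def get_error_body(status_code, message, debug):
    if debug:
        # format_exc() renders the exception currently being handled
        return traceback.format_exc()
    return "<html><title>%(code)d: %(message)s</title>" \
           "<body>%(code)d: %(message)s</body></html>" % {
               "code": status_code, "message": message}


try:
    raise ValueError("boom")
except ValueError:
    print(get_error_body(500, "Internal Server Error", debug=True))
```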
diff --git a/uliweb/core/SimpleFrame.py b/uliweb/core/SimpleFrame.py index <HASH>..<HASH> 100644 --- a/uliweb/core/SimpleFrame.py +++ b/uliweb/core/SimpleFrame.py @@ -330,7 +330,7 @@ def get_apps(apps_dir, include_apps=None, settings_file='settings.ini', local_se local_inifile = norm_path(os.path.join(apps_dir, local_settings_file)) if os.path.exists(local_inifile): x = pyini.Ini(local_inifile) - if x: + if x and x.get('GLOBAL'): installed_apps.extend(x.GLOBAL.get('INSTALLED_APPS', [])) installed_apps.extend(include_apps)
fix issue of "when local_settings.ini has no GLOBAL section, uliweb will exit with an exception"
py
diff --git a/aiortc/contrib/media.py b/aiortc/contrib/media.py index <HASH>..<HASH> 100644 --- a/aiortc/contrib/media.py +++ b/aiortc/contrib/media.py @@ -219,6 +219,7 @@ class MediaPlayer: self.__log_debug('Starting worker thread') self.__thread_quit = threading.Event() self.__thread = threading.Thread( + name='media-player', target=player_worker, args=( asyncio.get_event_loop(), self.__container,
[media player] give the thread a name
py
diff --git a/tests/python/unittest/test_numpy_op.py b/tests/python/unittest/test_numpy_op.py index <HASH>..<HASH> 100644 --- a/tests/python/unittest/test_numpy_op.py +++ b/tests/python/unittest/test_numpy_op.py @@ -6420,6 +6420,7 @@ def test_np_linalg_det(): @with_seed() @use_np [email protected](reason='https://github.com/apache/incubator-mxnet/issues/18184') def test_np_linalg_slogdet(): class TestSlogdet(HybridBlock): def __init__(self):
Update test_numpy_op.py (#<I>)
py