{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n \n-\"\"\"\n+\"\"\")"},"message":{"kind":"string","value":"Port this to python 3.x."},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1394,"cells":{"diff":{"kind":"string","value":"diff --git a/tests/setup_transaction_tests.py b/tests/setup_transaction_tests.py\nindex .. 
100644\n--- a/tests/setup_transaction_tests.py\n+++ b/tests/setup_transaction_tests.py\n@@ -105,7 +105,7 @@ curve_order = 218882428718392752222464057452572750885483644004160343436982041865\n \n @pytest.fixture\n def get_log():\n- def get_log(chain, contract, event_name):\n+ def get_log(tester, contract, event_name):\n event_ids_w_name = [k for k, v in \\\n contract.translator.event_data.items() if v[\"name\"] == event_name]\n assert len(event_ids_w_name) == 1, \\\n@@ -113,7 +113,7 @@ def get_log():\n event_id = event_ids_w_name[0]\n \n # Get the last logged event\n- logs = chain.head_state.receipts[-1].logs[-1]\n+ logs = tester.s.head_state.receipts[-1].logs[-1]\n \n # Ensure it has the event we are looking to decode\n assert logs.address == contract.address, \\"},"message":{"kind":"string","value":"Provide tester instead of chain as arg"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1395,"cells":{"diff":{"kind":"string","value":"diff --git a/pywb/apps/frontendapp.py b/pywb/apps/frontendapp.py\nindex .. 
100644\n--- a/pywb/apps/frontendapp.py\n+++ b/pywb/apps/frontendapp.py\n@@ -91,8 +91,6 @@ class FrontEndApp(object):\n \n self.cdx_api_endpoint = config.get('cdx_api_endpoint', '/cdx')\n \n- self._init_routes()\n-\n upstream_paths = self.get_upstream_paths(self.warcserver_server.port)\n \n framed_replay = config.get('framed_replay', True)\n@@ -106,6 +104,8 @@ class FrontEndApp(object):\n metadata_templ = os.path.join(self.warcserver.root_dir, '{coll}', 'metadata.yaml')\n self.metadata_cache = MetadataCache(metadata_templ)\n \n+ self._init_routes()\n+\n def _init_routes(self):\n \"\"\"Initialize the routes and based on the configuration file makes available\n specific routes (proxy mode, record)\"\"\"\n@@ -500,6 +500,11 @@ class FrontEndApp(object):\n # store original script_name (original prefix) before modifications are made\n environ['pywb.app_prefix'] = environ.get('SCRIPT_NAME', '')\n \n+ lang = args.pop('lang', '')\n+ if lang:\n+ pop_path_info(environ)\n+ environ['pywb_lang'] = lang\n+\n response = endpoint(environ, **args)\n \n except HTTPException as hte:"},"message":{"kind":"string","value":"routes: make coll route config extendable to support prefix routing for localization ukwa/ukwa-pywb# split init_routes() into init_coll_routes() and make_coll_routes() which retrieves a list of per-collection routes only"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1396,"cells":{"diff":{"kind":"string","value":"diff --git a/anyconfig/globals.py b/anyconfig/globals.py\nindex .. 
100644\n--- a/anyconfig/globals.py\n+++ b/anyconfig/globals.py\n@@ -1,5 +1,6 @@\n #\n # Copyright (C) 2013 - 2018 Satoru SATOH \n+# Copyright (C) 2019 Satoru SATOH \n # License: MIT\n #\n # pylint: disable=invalid-name"},"message":{"kind":"string","value":"fix: correct the copyright header in .globals"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1397,"cells":{"diff":{"kind":"string","value":"diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py b/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py\nindex .. 100644\n--- a/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py\n+++ b/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py\n@@ -63,7 +63,7 @@ V2_TO_V1_MAP = JSONDict(\n \n OS_TO_CLASSIFIER_TAGS = {\n \"linux\": \"Supported OS::Linux\",\n- \"mac_os\": \"Supported OS::Mac OS\",\n+ \"mac_os\": \"Supported OS::macOS\",\n \"windows\": \"Supported OS::Windows\",\n }"},"message":{"kind":"string","value":"Fix manifest migration of macOS tag (#)"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1398,"cells":{"diff":{"kind":"string","value":"diff --git a/asammdf/blocks/mdf_v4.py b/asammdf/blocks/mdf_v4.py\nindex .. 
100644\n--- a/asammdf/blocks/mdf_v4.py\n+++ b/asammdf/blocks/mdf_v4.py\n@@ -3683,8 +3683,6 @@ class MDF4(object):\n # for embedded attachments extrat data and create new files\n if flags & v4c.FLAG_AT_EMBEDDED:\n data = attachment.extract()\n-\n- return data, file_path\n else:\n # for external attachments read the file and return the content\n if flags & v4c.FLAG_AT_MD5_VALID:\n@@ -3697,7 +3695,6 @@ class MDF4(object):\n if attachment.mime.startswith(\"text\"):\n with open(file_path, \"r\") as f:\n data = f.read()\n- return data, file_path\n else:\n message = (\n f'ATBLOCK md5sum=\"{attachment[\"md5_sum\"]}\" '\n@@ -3713,12 +3710,15 @@ class MDF4(object):\n with open(file_path, mode) as f:\n file_path = Path(f\"FROM_{file_path}\")\n data = f.read()\n- return data, file_path\n except Exception as err:\n os.chdir(current_path)\n message = \"Exception during attachment extraction: \" + repr(err)\n logger.warning(message)\n- return b\"\", file_path\n+ data = b\"\"\n+ finally:\n+ os.chdir(current_path)\n+\n+ return data, file_path\n \n def get(\n self,"},"message":{"kind":"string","value":"don't change the current working directory"},"diff_languages":{"kind":"string","value":"py"}}},{"rowIdx":1399,"cells":{"diff":{"kind":"string","value":"diff --git a/master/buildbot/process/build.py b/master/buildbot/process/build.py\nindex .. 
100644\n--- a/master/buildbot/process/build.py\n+++ b/master/buildbot/process/build.py\n@@ -261,8 +261,6 @@ class Build(properties.PropertiesMixin, WorkerAPICompatMixin):\n (\"control\", \"builds\",\n str(self.buildid),\n \"stop\"))\n- yield self.master.data.updates.generateNewBuildEvent(self.buildid)\n-\n self.setupOwnProperties()\n self.setupWorkerForBuilder(workerforbuilder)\n \n@@ -274,9 +272,11 @@ class Build(properties.PropertiesMixin, WorkerAPICompatMixin):\n \n metrics.MetricCountEvent.log('active_builds', 1)\n \n- yield self.master.data.updates.setBuildStateString(self.buildid,\n- u'starting')\n+ # make sure properties are available to people listening on 'new' events\n+ yield self._flushProperties(None)\n self.build_status.buildStarted(self)\n+ yield self.master.data.updates.setBuildStateString(self.buildid, u'starting')\n+ yield self.master.data.updates.generateNewBuildEvent(self.buildid)\n \n try:\n self.setupBuild() # create .steps"},"message":{"kind":"string","value":"Delay sending 'new' build message until properties are populated"},"diff_languages":{"kind":"string","value":"py"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":13,"numItemsPerPage":100,"numTotalItems":278877,"offset":1300,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjYzODA4Niwic3ViIjoiL2RhdGFzZXRzL2hrczM1MGQvZ2l0LWRpZmYtdG8tY29tbWl0LWdlbW1hLTMtMjcwbSIsImV4cCI6MTc1NjY0MTY4NiwiaXNzIjoiaHR0cHM6Ly9odWdnaW5nZmFjZS5jbyJ9.rgI4TdUh_jP0_D2FG839-ktzdqNB4xB8trKuH_7uPsonOZKQCys5A3bmKP9E96exAFkZ5AMOoksuInn34M9WAA","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">
diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/las/client.py b/las/client.py index <HASH>..<HASH> 100644 --- a/las/client.py +++ b/las/client.py @@ -203,7 +203,7 @@ class Client: :param consent_id: Delete documents with this consent_id :type consent_id: str - :return: Feedback response from REST API + :return: Delete consent id response from REST API :rtype: dict :raises InvalidCredentialsException: If the credentials are invalid :raises requests.exception.RequestException: If error was raised by requests
Update correct docs error for delete consent id
py
diff --git a/rtpipe/RT.py b/rtpipe/RT.py index <HASH>..<HASH> 100644 --- a/rtpipe/RT.py +++ b/rtpipe/RT.py @@ -367,7 +367,7 @@ def search(d, data_mem, u_mem, v_mem, w_mem): blranges = [(d['nbl'] * t/d['nthread'], d['nbl']*(t+1)/d['nthread']) for t in range(d['nthread'])] for dmind in xrange(len(d['dmarr'])): dm = d['dmarr'][dmind] - logger.info('Dedispersing for %d' % dm,) + logger.debug('Dedispersing for %d' % dm,) dedisppart = partial(correct_dm, d, dm) # moves in fresh data dedispresults = resamppool.map(dedisppart, blranges) @@ -379,7 +379,7 @@ def search(d, data_mem, u_mem, v_mem, w_mem): # dedispersion in shared memory, mapped over baselines # set partial functions for pool.map - logger.info('Resampling for %d' % dt,) + logger.debug('Resampling for %d' % dt,) resample = dt/dtlast resamppart = partial(correct_dt, d, resample) # corrects in place resampresults = resamppool.map(resamppart, blranges)
moved dedisp/resample to debug statements
py
diff --git a/sportsreference/ncaaf/roster.py b/sportsreference/ncaaf/roster.py index <HASH>..<HASH> 100644 --- a/sportsreference/ncaaf/roster.py +++ b/sportsreference/ncaaf/roster.py @@ -254,7 +254,8 @@ class Player(AbstractPlayer): """ all_stats_dict = {} - for table_id in ['passing', 'rushing', 'defense', 'scoring']: + for table_id in ['passing', 'rushing', 'defense', 'scoring', + 'receiving']: table_items = utils._get_stats_table(player_info, 'table#%s' % table_id) career_items = utils._get_stats_table(player_info,
Add NCAAF player receiving stats For a handful of NCAAF players, the "Receiving & Rushing" table has an ID of `receiving` instead of the commonly-used `rushing`. The former was not being parsed, and the relevant stats were ignored for those players. Simply adding that ID to the list of tables to parse resolves the issue.
py
diff --git a/feedjack/fjlib.py b/feedjack/fjlib.py index <HASH>..<HASH> 100644 --- a/feedjack/fjlib.py +++ b/feedjack/fjlib.py @@ -17,6 +17,11 @@ import logging log = logging.getLogger() + +from django.dispatch import Signal +transaction_start = Signal(providing_args=list()) +transaction_done = Signal(providing_args=list()) + from django.db import transaction def transaction_wrapper(func, logger=None): '''Traps exceptions in transaction.commit_manually blocks, @@ -26,13 +31,16 @@ def transaction_wrapper(func, logger=None): @transaction.commit_manually @ft.wraps(func) def _transaction_wrapper(*argz, **kwz): - try: return func(*argz, **kwz) + transaction_start.send(sender=func.func_name) + try: result = func(*argz, **kwz) except Exception as err: import sys, traceback (logger or log).error(( u'Unhandled exception: {0},' ' traceback:\n {1}' ).format( err, smart_unicode(traceback.format_tb(sys.exc_info()[2])) )) raise + finally: transaction_done.send(sender=func.func_name) + return result return _transaction_wrapper else: return ft.partial(transaction_wrapper, logger=func)
fjlib: added transaction_start/done signals
py
diff --git a/salt/modules/schedule.py b/salt/modules/schedule.py index <HASH>..<HASH> 100644 --- a/salt/modules/schedule.py +++ b/salt/modules/schedule.py @@ -132,6 +132,9 @@ def list_(show_all=False, if item not in SCHEDULE_CONF: del schedule[job][item] continue + if schedule[job][item] is None: + del schedule[job][item] + continue if schedule[job][item] == 'true': schedule[job][item] = True if schedule[job][item] == 'false':
Splay defaulting to None internally, broke schedule.present causing it to always report differences when a state is run. Updating the schedule.list function to check if an attribute is None and remove it.
py
diff --git a/drip/models.py b/drip/models.py index <HASH>..<HASH> 100644 --- a/drip/models.py +++ b/drip/models.py @@ -55,7 +55,7 @@ class SentDrip(models.Model): date = models.DateTimeField(auto_now_add=True) drip = models.ForeignKey('drip.Drip', related_name='sent_drips') - user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='sent_drips') + user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'), related_name='sent_drips') subject = models.TextField() body = models.TextField()
use getattr to set a default user model in old django
py
diff --git a/airflow/configuration.py b/airflow/configuration.py index <HASH>..<HASH> 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -48,7 +48,7 @@ DEFAULT_CONFIG = """\ # The home folder for airflow, default is ~/airflow airflow_home = {AIRFLOW_HOME} -# The folder where you airflow pipelines live, most likely a +# The folder where your airflow pipelines live, most likely a # subfolder in a code repository dags_folder = {AIRFLOW_HOME}/dags
Typo in config docs changing "you" to "your"
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,8 @@ # Setup file for feedstail from setuptools import setup -long_description = """\ -Feedstail monitor a feed and emits new entries. It aim to be simple, -hackable and compatible with rsstail, its C brother.""" +with open('README.rst') as readme: + long_description = readme.read() setup( name = "feedstail" , description = "A tail-f-like utility for feeds"
[doc]: Setup: README.rst becomes the long description.
py
diff --git a/defender/urls.py b/defender/urls.py index <HASH>..<HASH> 100644 --- a/defender/urls.py +++ b/defender/urls.py @@ -1,8 +1,7 @@ -from django.conf.urls import patterns, url +from django.conf.urls import url from .views import block_view, unblock_ip_view, unblock_username_view -urlpatterns = patterns( - '', +urlpatterns = [ url(r'^blocks/$', block_view, name="defender_blocks_view"), url(r'^blocks/ip/(?P<ip_address>[a-z0-9-._]+)/unblock$', unblock_ip_view, @@ -10,4 +9,4 @@ urlpatterns = patterns( url(r'^blocks/username/(?P<username>[a-z0-9-._@]+)/unblock$', unblock_username_view, name="defender_unblock_username_view"), -) +]
making urlpatterns a plain list as of Django <I>, creating urlpatterns with the `django.conf.urls.patterns` function became deprecated and will be removed in <I>. <URL>
py
diff --git a/soco/core.py b/soco/core.py index <HASH>..<HASH> 100755 --- a/soco/core.py +++ b/soco/core.py @@ -2875,6 +2875,7 @@ SOURCES = { r"^x-sonosapi-stream:": MUSIC_SRC_RADIO, r"^x-sonosapi-radio:": MUSIC_SRC_RADIO, r"^x-sonosapi-hls:": MUSIC_SRC_RADIO, + r"^x-sonos-http:sonos": MUSIC_SRC_RADIO, r"^aac:": MUSIC_SRC_RADIO, r"^hls-radio:": MUSIC_SRC_RADIO, r"^https?:": MUSIC_SRC_WEB_FILE,
Detect Sonos Radio as radio source (#<I>)
py
diff --git a/lahja/eventbus.py b/lahja/eventbus.py index <HASH>..<HASH> 100644 --- a/lahja/eventbus.py +++ b/lahja/eventbus.py @@ -69,7 +69,8 @@ class EventBus: if not self._is_allowed_to_receive(config, endpoint.name): continue - endpoint._receiving_queue.put_nowait((item, config)) + if self._running: + endpoint._receiving_queue.put_nowait((item, config)) def _is_allowed_to_receive(self, config: BroadcastConfig, endpoint: str) -> bool: return config is None or config.allowed_to_receive(endpoint)
Don't try to propagate events after shutdown
py
diff --git a/autograd/__init__.py b/autograd/__init__.py index <HASH>..<HASH> 100644 --- a/autograd/__init__.py +++ b/autograd/__init__.py @@ -1,6 +1,7 @@ from __future__ import absolute_import from .core import primitive, make_vjp from . import container_types +from .container_types import make_tuple, make_list, make_dict from .convenience_wrappers import (grad, multigrad, multigrad_dict, elementwise_grad, value_and_grad, grad_and_aux, hessian_vector_product, hessian, jacobian, vector_jacobian_product, grad_named,
import make_tuple, make_list, and make_dict into autograd namespace
py
diff --git a/tests/www/test_security.py b/tests/www/test_security.py index <HASH>..<HASH> 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -169,6 +169,12 @@ class TestSecurity(unittest.TestCase): assert role_perms_len == new_role_perms_len + def test_verify_public_role_has_no_permissions(self): + with self.app.app_context(): + public = self.appbuilder.sm.find_role("Public") + + assert public.permissions == [] + def test_get_user_roles(self): user = mock.MagicMock() user.is_anonymous = False
Add test for Public role permissions. (#<I>) In #<I>, all permissions were removed from the Public role. This adds a test to ensure that the default public role doesn't have any permissions. related: #<I>
py
diff --git a/src/extargsparse/__lib_debug__.py b/src/extargsparse/__lib_debug__.py index <HASH>..<HASH> 100644 --- a/src/extargsparse/__lib_debug__.py +++ b/src/extargsparse/__lib_debug__.py @@ -1553,13 +1553,6 @@ class ExtArgsParse(_LoggerObject): parentpaths = [self.__maincmd] if paths is not None: parentpaths = paths - for chld in parentpaths[-1].subcommands: - curpaths = parentpaths - curpaths.append(chld) - copyoptcheck = _OptCheck() - copyoptcheck.copy(optcheck) - self.__check_varname_inner(curpaths,copyoptcheck) - curpaths.pop() for opt in parentpaths[-1].cmdopts: if opt.isflag: @@ -1578,6 +1571,15 @@ class ExtArgsParse(_LoggerObject): if not bval: msg = '%s is already in the check list'%(opt.longopt) self.error_msg(msg) + + for chld in parentpaths[-1].subcommands: + curpaths = parentpaths + curpaths.append(chld) + copyoptcheck = _OptCheck() + copyoptcheck.copy(optcheck) + self.__check_varname_inner(curpaths,copyoptcheck) + curpaths.pop() + return def __set_command_line_self_args_inner(self,paths=None):
to make the __check_varname_inner more valid and check
py
diff --git a/macaca/asserters.py b/macaca/asserters.py index <HASH>..<HASH> 100644 --- a/macaca/asserters.py +++ b/macaca/asserters.py @@ -16,7 +16,7 @@ def is_displayed(target): Return True if the element is displayed or return False otherwise. """ is_displayed = getattr(target, 'is_displayed', None) - if not is_displayed or not hasattr(is_displayed, '__call__'): + if not is_displayed or not callable(is_displayed): raise TypeError('Target has no attribute \'is_displayed\' or not callable') if not is_displayed(): raise WebDriverException('element not visible') @@ -32,7 +32,7 @@ def is_not_displayed(target): Return True if the element is not displayed or return False otherwise. """ is_displayed = getattr(target, 'is_displayed', None) - if not is_displayed or not hasattr(is_displayed, '__call__'): + if not is_displayed or not callable(is_displayed): raise TypeError('Target has no attribute \'is_displayed\' or not callable') if is_displayed(): raise WebDriverException('element is visible')
Use callable instead of check for __call__
py
diff --git a/sos/policies/__init__.py b/sos/policies/__init__.py index <HASH>..<HASH> 100644 --- a/sos/policies/__init__.py +++ b/sos/policies/__init__.py @@ -707,6 +707,7 @@ any third party. _msg = self.msg % {'distro': self.distro, 'vendor': self.vendor, 'vendor_url': self.vendor_url, 'vendor_text': self.vendor_text, + 'tmpdir': self.commons['tmpdir'], 'changes_text': changes_text} _fmt = "" for line in _msg.splitlines():
[policies] fix missing tmpdir for RedHat distro a0e0a<I> removed setting 'tmpdir' from Policy get_msg while RedHatPolicy refers on it. Resolves: #<I>
py
diff --git a/dataswim/data/transform.py b/dataswim/data/transform.py index <HASH>..<HASH> 100644 --- a/dataswim/data/transform.py +++ b/dataswim/data/transform.py @@ -492,6 +492,19 @@ class Transform(): if self.autoprint is True: self.ok("Diff column " + name + " added to the dataframe") + def gsum_(self, col, index_col=True): + """ + Group by and sum column + """ + try: + df = self.df.copy() + df = df.groupby([col]).sum() + if index_col is True: + df[col] = df.index.values + return self.clone_(df) + except Exception as e: + self.err(e, self.ratio, "Can not groupsum column") + def ratio(self, col, ratio_col="Ratio"): """ Add a column whith the percentages ratio from a column
Add a gsum_ method for groupsum
py
diff --git a/forms_builder/example_project/settings.py b/forms_builder/example_project/settings.py index <HASH>..<HASH> 100644 --- a/forms_builder/example_project/settings.py +++ b/forms_builder/example_project/settings.py @@ -4,6 +4,7 @@ import os, sys DEBUG = True +SITE_ID = 1 PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) PROJECT_DIRNAME = PROJECT_ROOT.split(os.sep)[-1] STATIC_URL = "/static/"
Old Djangos need SITE_ID in settings for tests.
py
diff --git a/salesforce_bulk/__init__.py b/salesforce_bulk/__init__.py index <HASH>..<HASH> 100644 --- a/salesforce_bulk/__init__.py +++ b/salesforce_bulk/__init__.py @@ -1,4 +1,4 @@ from salesforce_bulk import SalesforceBulk from csv_adapter import CsvDictsAdapter -__version__ = '1.0.4' +__version__ = '1.0.5'
Bumping version to <I>
py
diff --git a/wtforms_html5.py b/wtforms_html5.py index <HASH>..<HASH> 100644 --- a/wtforms_html5.py +++ b/wtforms_html5.py @@ -179,6 +179,39 @@ from wtforms.validators import Length, NumberRange, StopValidation from wtforms.compat import string_types +__all__ = ( + # WIDGETS + 'Input', + 'TextInput', + 'DateInput', + 'URLInput', + 'EmailInput', + 'SearchInput', + 'TelInput', + 'NumberInput', + 'RangeInput', + 'DecimalInput', + 'DecimalRangeInput', + # FIELDS + 'TextField', + 'SearchField', + 'URLField', + 'EmailField', + 'TelField', + 'IntegerField', + 'DateField', + 'DecimalField', + 'FloatField', + 'IntegerRangeField', + 'DecimalRangeField', + 'FloatRangeField', + # VALIDATORS + 'Required', + 'DataNotNone', + 'DateRange' +) + + # CUSTOM LOGIC def get_html5_kwargs(field, kwargs):
added `__all__`
py
diff --git a/mtools/util/logfile.py b/mtools/util/logfile.py index <HASH>..<HASH> 100644 --- a/mtools/util/logfile.py +++ b/mtools/util/logfile.py @@ -85,14 +85,14 @@ class LogFile(InputSource): def __iter__(self): """ iteration over LogFile object will return a LogEvent object for each line. """ - # always start from the beginning logfile - if not self.from_stdin: - self.filehandle.seek(0) - for line in self.filehandle: le = LogEvent(line) yield le + # future iterations start from the beginning + if not self.from_stdin: + self.filehandle.seek(0) + def __len__(self): """ return the number of lines in a log file. """
fixed bug where mlogfilter fast_forward wasn't working anymore.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,22 +1,17 @@ import os from setuptools import setup -# Function for reading readme. -def read(fname): - return open(os.path.join(os.path.dirname(__file__), fname)).read() - setup( name = "pytesseract", - version = "0.1", + version = "0.1.3", author = "Samuel Hoffstaetter", author_email="", maintainer = "Matthias Lee", maintainer_email = "[email protected]", - description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR "), + description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"), license = "GPLv3", keywords = "python-tesseract OCR Python", url = "https://github.com/madmaze/python-tesseract", packages=['pytesseract'], - long_description=read('README.md'), package_data = {'pytesseract': ['*.png','*.jpg']} )
removed reading of readme.md from setup.py as it caused issues when installing through pip
py
diff --git a/tests/test_toolbox_network_tools.py b/tests/test_toolbox_network_tools.py index <HASH>..<HASH> 100644 --- a/tests/test_toolbox_network_tools.py +++ b/tests/test_toolbox_network_tools.py @@ -37,6 +37,18 @@ class NetworkToolsTest(unittest.TestCase): time.sleep(.4) #os.remove(fname) + def test_03_download_failed_file(self): + fname = os.getcwd() + os.sep + 'no_such_file.txt' + url = 'http://no_such_file_or_domain.blah/some_file_that_doesnt_exist.mp8' + mod_net.download_file_no_logon(url, fname) + self.assertEqual(os.path.isfile(fname), False) + + # make sure error is logged + with open('result.log', 'r') as f: + lg_data = f.read() + self.assertTrue('Error - cant download http://no_such_file_or_domain.blah/some_file_that_doesnt_exist.mp8' in lg_data) + + def test_04_read_username_password(self): pass with open('dummy_credentials.txt', 'w') as f:
test to ensure network_tools logs failed downloads correctly
py
diff --git a/src/feat/agents/host/host_agent.py b/src/feat/agents/host/host_agent.py index <HASH>..<HASH> 100644 --- a/src/feat/agents/host/host_agent.py +++ b/src/feat/agents/host/host_agent.py @@ -56,6 +56,7 @@ class HostAgent(agent.BaseAgent, rpc.AgentMixin): ports = state.medium.get_descriptor().port_range state.port_allocator = port_allocator.PortAllocator(self, ports) + f = fiber.Fiber() f.add_callback(fiber.drop_result, self._update_hostname) f.add_callback(fiber.drop_result, self._load_definition, hostdef) @@ -268,7 +269,7 @@ class Descriptor(descriptor.Descriptor): # Hostname of the machine, updated when an agent is started document.field('hostname', None) # Range used for allocating new ports - document.field('port_range', (5000, 5999)) + document.field('port_range', range(5000, 5999)) class StartAgentReplier(replier.BaseReplier):
Add port allocator in Host Agent
py
diff --git a/autograd/differential_operators.py b/autograd/differential_operators.py index <HASH>..<HASH> 100644 --- a/autograd/differential_operators.py +++ b/autograd/differential_operators.py @@ -24,7 +24,7 @@ def grad(fun, x): vjp, ans = _make_vjp(fun, x) if not vspace(ans).size == 1: raise TypeError("Grad only applies to real scalar-output functions. " - "Try jacobian or elementwise_grad.") + "Try jacobian, elementwise_grad or holomorphic_grad.") return vjp(vspace(ans).ones()) @unary_to_nary @@ -127,6 +127,10 @@ def value_and_grad(fun, x): """Returns a function that returns both value and gradient. Suitable for use in scipy.optimize""" vjp, ans = _make_vjp(fun, x) + if not vspace(ans).size == 1: + raise TypeError("value_and_grad only applies to real scalar-output " + "functions. Try jacobian, elementwise_grad or " + "holomorphic_grad.") return ans, vjp(vspace(ans).ones()) def grad_and_aux(fun, argnum=0):
Added check to value_and_grad to ensure that the function's output is real-valued and scalar
py
diff --git a/werkzeug/utils.py b/werkzeug/utils.py index <HASH>..<HASH> 100644 --- a/werkzeug/utils.py +++ b/werkzeug/utils.py @@ -352,7 +352,6 @@ def redirect(location, code=302): :param location: the location the response should redirect to. :param code: the redirect status code. defaults to 302. """ - assert code in (201, 301, 302, 303, 305, 307), 'invalid code' from werkzeug.wrappers import BaseResponse display_location = location if isinstance(location, unicode):
Removed the assertion on redirect status codes, allowing arbitrary codes to be used freely. This fixes #<I>
py
diff --git a/qpsphere/models/__init__.py b/qpsphere/models/__init__.py index <HASH>..<HASH> 100644 --- a/qpsphere/models/__init__.py +++ b/qpsphere/models/__init__.py @@ -34,8 +34,8 @@ def simulate(radius=5e-6, sphere_index=1.339, medium_index=1.333, Sphere model to use (see `available`) pixel_size: float or None Pixel size [m]; if set to `None` the pixel size is - chosen such that the radius fits at least 3.5 times - into the grid. + chosen such that the radius fits at least three to + four times into the grid. center: tuple of floats or None Center position in image coordinates [px]; if set to None, the center of the image (grid_size - 1)/2 is @@ -47,7 +47,14 @@ def simulate(radius=5e-6, sphere_index=1.339, medium_index=1.333, Quantitative phase data set """ if pixel_size is None: - pixel_size = 3.5 * radius / np.min(grid_size) + rl = radius / wavelength + if rl < 5: + fact = 4 + elif rl >=5 and rl <=10: + fact = 4 - (rl - 5) / 5 + else: + fact = 3 + pixel_size = fact * radius / np.min(grid_size) if center is None: center = (np.array(grid_size) - 1) / 2
dynamic auto pixel size depending on expected degree of diffraction
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,6 @@ setup( name='precisionmapper', packages=find_packages(), package_data={}, - version=__version__, license="MIT license", platforms='Posix; MacOS X', @@ -33,7 +32,7 @@ setup( author="Thibault Ducret", author_email='[email protected]', url=_GITHUB_URL, - download_url='%s/tarball/%s' % (_GITHUB_URL, __version__), + download_url=_GITHUB_URL+"/tarball/"+__version__, keywords=_KEYWORDS, setup_requires=requirements, install_requires=requirements, @@ -52,3 +51,7 @@ setup( # (or git commit --am "Comment" and git push) # git tag 0.0.1 -m "First version" # git push --tags + +# If you need to delete a tag +# git push --delete origin VERSION +# git tag -d VERSION
Another fix for the deploy problem on Travis
py
diff --git a/smart_open/smart_open_lib.py b/smart_open/smart_open_lib.py index <HASH>..<HASH> 100644 --- a/smart_open/smart_open_lib.py +++ b/smart_open/smart_open_lib.py @@ -668,7 +668,7 @@ def _compression_wrapper(file_obj, filename, mode): if _need_to_buffer(file_obj, mode, ext): warnings.warn('streaming gzip support unavailable, see %s' % _ISSUE_189_URL) file_obj = io.BytesIO(file_obj.read()) - if ext in COMPRESSED_EXT and mode.endswith('+'): + if ext in _COMPRESSOR_REGISTRY and mode.endswith('+'): raise ValueError('transparent (de)compression unsupported for mode %r' % mode) try:
fix post-merge artifact
py
diff --git a/spyder/plugins/completion/kite/plugin.py b/spyder/plugins/completion/kite/plugin.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/completion/kite/plugin.py +++ b/spyder/plugins/completion/kite/plugin.py @@ -9,6 +9,7 @@ # Standard library imports import logging import functools +import os.path as osp # Qt imports from qtpy.QtCore import Slot @@ -156,11 +157,12 @@ class KiteCompletionPlugin(SpyderCompletionPlugin): box.set_checked(False) box.set_check_visible(True) box.setText( - _("Seems like your Kite installation is faulty. " - "If you want to use Kite, please remove all files " - "related to Kite at the following path, " + _("It seems like your Kite installation is faulty. " + "If you want to use Kite, please remove the " + "directory that appears bellow, " "and try a reinstallation:<br><br>" - "<code>{path}</code>".format(path=path))) + "<code>{kite_dir}</code>".format( + kite_dir=osp.dirname(path)))) box.exec_()
Kite: Update message to point directory of the faulty installation
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,6 +18,7 @@ requires = [ 'async_timeout~=3.0', 'attrs>=18.0', 'etcd3~=0.8.0', + 'protobuf==3.5.2.post1', # remove if kragniz/python-etcd3#432 is released 'msgpack~=0.5.6', 'python-json-logger', ]
setup: Temporarily add explicit protobuf dependency until next etcd3 release
py
diff --git a/salt/modules/rh_service.py b/salt/modules/rh_service.py index <HASH>..<HASH> 100644 --- a/salt/modules/rh_service.py +++ b/salt/modules/rh_service.py @@ -48,6 +48,10 @@ def __virtual__(): Only work on select distros which still use Red Hat's /usr/bin/service for management of either sysvinit or a hybrid sysvinit/upstart init system. ''' + # Disable when booted with systemd + if __utils__['systemd.booted'](__context__): + return (False, 'The rh_service execution module failed to load: this system was booted with systemd.') + # Enable on these platforms only. enable = set(( 'XenServer',
Do not load rh_service module when booted with systemd. Amazon continues to release versions of Amazon Linux 1, which boots with rh_service, but also Amazon Linux 2, and both with the same versioning scheme, so we cannot depend on the date for making decisions here.
py
diff --git a/basil/utils/utils.py b/basil/utils/utils.py index <HASH>..<HASH> 100644 --- a/basil/utils/utils.py +++ b/basil/utils/utils.py @@ -23,7 +23,7 @@ def logging(fn): def bitvector_to_byte_array(bitvector): bsize = len(bitvector) - size_bytes = ((bsize - 1) / 8) + 1 + size_bytes = int(((bsize - 1) / 8) + 1) bs = array.array('B', bitvector.vector.tostring())[0:size_bytes] bitstream_swap = '' lsbits = lambda b: (b * 0x0202020202 & 0x010884422010) % 1023
MAINT: force int under python 3
py
diff --git a/src/websockets/version.py b/src/websockets/version.py index <HASH>..<HASH> 100644 --- a/src/websockets/version.py +++ b/src/websockets/version.py @@ -42,7 +42,7 @@ if not released: # pragma: no cover check=True, text=True, ).stdout.strip() - except subprocess.CalledProcessError: + except (subprocess.CalledProcessError, FileNotFoundError): pass else: description_re = r"[0-9.]+-([0-9]+)-(g[0-9a-f]{7}(?:-dirty)?)"
Update version.py Allow version check to ignore `git` if the command is not available on the path.
py
diff --git a/perception/phoxi_sensor.py b/perception/phoxi_sensor.py index <HASH>..<HASH> 100644 --- a/perception/phoxi_sensor.py +++ b/perception/phoxi_sensor.py @@ -207,6 +207,7 @@ class PhoXiSensor(CameraSensor): """Callback for handling textures (greyscale images). """ data = self._bridge.imgmsg_to_cv2(msg) + data = 255.0 * data / 1200.0 # Experimentally set value for white data = np.clip(data, 0., 255.0).astype(np.uint8) gsimage = GrayscaleImage(data, frame=self._frame) self._cur_color_im = gsimage.to_color()
Perform proper color rescaling on phoxi
py
diff --git a/sigal/utils.py b/sigal/utils.py index <HASH>..<HASH> 100644 --- a/sigal/utils.py +++ b/sigal/utils.py @@ -73,7 +73,8 @@ def read_markdown(filename): with codecs.open(filename, 'r', 'utf-8-sig') as f: text = f.read() - md = Markdown(extensions=['meta'], output_format='html5') + md = Markdown(extensions=['markdown.extensions.meta'], + output_format='html5') output = {'description': md.convert(text)} try:
Update markdown extension name. With Python-Markdown <I>, “shortened” extension names are deprecated.
py
diff --git a/aiomysql/utils.py b/aiomysql/utils.py index <HASH>..<HASH> 100644 --- a/aiomysql/utils.py +++ b/aiomysql/utils.py @@ -99,6 +99,20 @@ class _SAConnectionContextManager(_ContextManager): return result +class _TransactionContextManager(_ContextManager): + + if PY_35: # pragma: no branch + + @asyncio.coroutine + def __aexit__(self, exc_type, exc, tb): + if exc_type: + yield from self._obj.rollback() + else: + if self._obj.is_active: + yield from self._obj.commit() + self._obj = None + + class _PoolAcquireContextManager(_ContextManager): __slots__ = ('_coro', '_conn', '_pool')
add async transaction context manager
py
diff --git a/falafel/__init__.py b/falafel/__init__.py index <HASH>..<HASH> 100644 --- a/falafel/__init__.py +++ b/falafel/__init__.py @@ -1,5 +1,6 @@ import os from .core import LogFileOutput, Mapper, IniConfigFile, LegacyItemAccess # noqa: F401 +from .core import FileListing # noqa: F401 from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401 from .mappers import get_active_lines # noqa: F401 from .util import defaults, parse_table # noqa: F401
Add FileListing to objects we export
py
diff --git a/polyaxon_client/tracking/contrib/logging_tensor_hook.py b/polyaxon_client/tracking/contrib/logging_tensor_hook.py index <HASH>..<HASH> 100644 --- a/polyaxon_client/tracking/contrib/logging_tensor_hook.py +++ b/polyaxon_client/tracking/contrib/logging_tensor_hook.py @@ -20,7 +20,7 @@ class PolyaxonLoggingTensorHook(tf.train.LoggingTensorHook): self.tensors_dict = tensors_dict.copy() def _log_tensors(self, tensor_values): - super(PolyaxonMetrics, self)._log_tensors(tensor_values) + super(PolyaxonLoggingTensorHook, self)._log_tensors(tensor_values) if settings.IN_CLUSTER: for k in self.tensors_dict.keys(): self.tensors_dict[k] = tensor_values[k]
Update logging_tensor_hook.py
py
diff --git a/grimoire/elk/elastic.py b/grimoire/elk/elastic.py index <HASH>..<HASH> 100644 --- a/grimoire/elk/elastic.py +++ b/grimoire/elk/elastic.py @@ -28,6 +28,7 @@ from dateutil import parser import json import logging import requests +import time class ElasticConnectException(Exception): message = "Can't connect to ElasticSearch" @@ -68,6 +69,8 @@ class ElasticSearch(object): max_items = self.max_items_bulk current = 0 + total = 0 # total items added with bulk + total_search = 0 # total items found with search bulk_json = "" url = self.index_url+'/'+es_type+'/_bulk' @@ -78,13 +81,20 @@ class ElasticSearch(object): if current >= max_items: requests.put(url, data=bulk_json) bulk_json = "" + total += current current = 0 data_json = json.dumps(item) bulk_json += '{"index" : {"_id" : "%s" } }\n' % (item[field_id]) bulk_json += data_json +"\n" # Bulk document current += 1 - requests.put(url, data=bulk_json) + total += current + + # Wait until in searches all items are returned + while total_search != total: + time.sleep(0.1) + r = requests.get(self.index_url+'/'+es_type+'/_search?size=1') + total_search = r.json()['hits']['total'] def create_mapping(self, mappings):
Wait after a bulk operation until all items are returned in searches.
py
diff --git a/timepiece/forms.py b/timepiece/forms.py index <HASH>..<HASH> 100644 --- a/timepiece/forms.py +++ b/timepiece/forms.py @@ -54,7 +54,6 @@ class EditPersonForm(auth_forms.UserChangeForm): instance = super(EditPersonForm, self).save(*args, **kwargs) password_one = self.cleaned_data.get('password_one', None) if password_one: - print 'tests' instance.set_password(password_one) if commit: instance.save()
removed print --HG-- branch : feature/no-crm
py
diff --git a/openquakeserver/engine/v1/calc_urls.py b/openquakeserver/engine/v1/calc_urls.py index <HASH>..<HASH> 100644 --- a/openquakeserver/engine/v1/calc_urls.py +++ b/openquakeserver/engine/v1/calc_urls.py @@ -1,7 +1,7 @@ from django.conf.urls.defaults import patterns from django.conf.urls.defaults import url -# each url is prefixed with /calc/ +# each url is prefixed with /v1/calc/ urlpatterns = patterns( 'openquakeserver.engine.views', url(r'^hazard$', 'calc_hazard'),
engine/v1/calc_urls: Corrected a comment.
py
diff --git a/ca/django_ca/admin.py b/ca/django_ca/admin.py index <HASH>..<HASH> 100644 --- a/ca/django_ca/admin.py +++ b/ca/django_ca/admin.py @@ -127,7 +127,6 @@ class CertificateAuthorityAdmin(CertificateMixin, admin.ModelAdmin): 'fields': [ 'authorityInfoAccess', 'authorityKeyIdentifier', - 'issuerAltName', 'nameConstraints', 'subjectKeyIdentifier', ],
remove issuerAltName (cannot be set by this software)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -20,7 +20,10 @@ if sys.version_info[:2] <= (2, 6): "Python version." ) -dependencies = ['ordered-set'] +if sys.version_info[:2] <= (3, 5): + dependencies = ['ordered-set<4.0.0'] +else: + dependencies = ['ordered-set'] extras = { 'docs': ['sphinx'],
Use old version of ordered-set for old versions of python
py
diff --git a/grimoire/elk/bugzilla.py b/grimoire/elk/bugzilla.py index <HASH>..<HASH> 100644 --- a/grimoire/elk/bugzilla.py +++ b/grimoire/elk/bugzilla.py @@ -274,7 +274,7 @@ class BugzillaEnrich(Enrich): def issues_to_es(self, items): - elastic_type = "issues" + elastic_type = "items" max_items = self.elastic.max_items_bulk current = 0
[enrich][bugzilla] Use items as the type for the ES index
py
diff --git a/xmantissa/signup.py b/xmantissa/signup.py index <HASH>..<HASH> 100644 --- a/xmantissa/signup.py +++ b/xmantissa/signup.py @@ -48,7 +48,7 @@ class TicketClaimer(Page): res = IResource(something) lgo = getattr(res, 'logout', lambda : None) ISession(ctx).setDefaultResource(res, lgo) - return URL.fromContext(ctx).click("/") + return URL.fromContext(ctx).click("/private") return None
redirect to /private after ticket claiming, fixes #<I>
py
diff --git a/mbuild/compound.py b/mbuild/compound.py index <HASH>..<HASH> 100755 --- a/mbuild/compound.py +++ b/mbuild/compound.py @@ -54,15 +54,7 @@ def load(filename, relative_to_module=None, compound=None, coords_only=False, rigid : bool, optional, default=False Treat the compound as a rigid body **kwargs : keyword arguments - Arbitrary keyword arguments. - - Args: - **ref_distance : float, default=1.0 - Reference distance for conversion to reduced units - **ref_mass : float, default=1.0 - Reference mass for conversion to reduced units - **ref_energy : float, default=1.0 - Reference energy for conversion to reduced units + Key word arguments passed to mdTraj for loading. Returns -------
Removed improper **kwarg docstring Incorrect description of arguments that could be passed to load function.
py
diff --git a/openquake/risklib/scientific.py b/openquake/risklib/scientific.py index <HASH>..<HASH> 100644 --- a/openquake/risklib/scientific.py +++ b/openquake/risklib/scientific.py @@ -243,6 +243,7 @@ class VulnerabilityFunction(object): for a in range(len(values)): losses[a] *= ratios elif self.distribution_name == 'BT': + assert rng, 'ignore_covs cannot be zero with the beta distribution' stddevs = means * covs alpha = _alpha(means, stddevs) beta = _beta(means, stddevs)
Added a sanity check [ci skip]
py
diff --git a/qiskit/circuit/library/standard_gates/swap.py b/qiskit/circuit/library/standard_gates/swap.py index <HASH>..<HASH> 100644 --- a/qiskit/circuit/library/standard_gates/swap.py +++ b/qiskit/circuit/library/standard_gates/swap.py @@ -104,7 +104,7 @@ class SwapGate(Gate): class CSwapGate(ControlledGate): - r"""Controlled-X gate. + r"""Controlled-SWAP gate, also known as the Fredkin gate. **Circuit symbol:**
Typo fix of controlled SWAP gate documentation (#<I>) * Update for typo fix of controlled swap gate In response to Issue #<I> * To keep consistency.
py
diff --git a/flask_moment.py b/flask_moment.py index <HASH>..<HASH> 100644 --- a/flask_moment.py +++ b/flask_moment.py @@ -7,11 +7,7 @@ class _moment(object): @staticmethod def include_moment(version = '2.5.1'): if version is not None: - if request.is_secure: - scheme = 'https' - else: - scheme = 'http' - js = '<script src="%s://cdnjs.cloudflare.com/ajax/libs/moment.js/%s/moment-with-langs.min.js"></script>\n' % (scheme, version) + js = '<script src="//cdnjs.cloudflare.com/ajax/libs/moment.js/%s/moment-with-langs.min.js"></script>\n' % version return Markup('''%s<script> function flask_moment_render(elem) { $(elem).text(eval('moment("' + $(elem).data('timestamp') + '").' + $(elem).data('format') + ';'));
fixed https support (is_secure() is not reliable in some scenarios, like when it's behind an ssl terminating proxy)
py
diff --git a/tilequeue/command.py b/tilequeue/command.py index <HASH>..<HASH> 100755 --- a/tilequeue/command.py +++ b/tilequeue/command.py @@ -2134,7 +2134,7 @@ def tilequeue_meta_tile(cfg, args): coord_data = [dict(coord=x) for x in pyramid_coords] try: - fetched_coord_data = data_fetcher.fetch_tiles(coord_data) + fetched_coord_data = list(data_fetcher.fetch_tiles(coord_data)) except Exception as e: meta_tile_logger.pyramid_fetch_failed(e, parent, job_coord) continue
Realize fetch_tiles generator call immediately In order to catch fetch errors, the call needs to be realized immediately. We know that all coordinates here will be a part of the same zoom <I> parent so realizing the list shouldn't impact memory usage.
py
diff --git a/udiskie/mount.py b/udiskie/mount.py index <HASH>..<HASH> 100644 --- a/udiskie/mount.py +++ b/udiskie/mount.py @@ -26,6 +26,9 @@ class AutoMounter: self.bus.add_signal_receiver(self.device_added, signal_name='DeviceAdded', bus_name='org.freedesktop.UDisks') + self.bus.add_signal_receiver(self.device_removed, + signal_name='DeviceRemoved', + bus_name='org.freedesktop.UDisks') self.bus.add_signal_receiver(self.device_changed, signal_name='DeviceChanged', bus_name='org.freedesktop.UDisks') @@ -48,8 +51,16 @@ class AutoMounter: def device_added(self, device): self.log.debug('device added: %s' % (device,)) + # Since the device just appeared we don't want the old state. + if device in self.last_device_state: + del self.last_device_state[device] self._mount_device(udiskie.device.Device(self.bus, device)) + def device_removed(self, device): + self.log.debug('device removed: %s' % (device,)) + if device in self.last_device_state: + del self.last_device_state[device] + def device_changed(self, device): self.log.debug('device changed: %s' % (device,)) last_state = self.last_device_state.get(device)
More complete handling of the last state storage. refs issue 1
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -162,7 +162,7 @@ html_split_index = True html_show_sourcelink = True html_sourcelink_suffix = ".rst" # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -html_show_sphinx = True +html_show_sphinx = False # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. html_show_copyright = True
docs/conf.py: disable html footer showing sphinx and theme.
py
diff --git a/jp_proxy_widget/downloader.py b/jp_proxy_widget/downloader.py index <HASH>..<HASH> 100644 --- a/jp_proxy_widget/downloader.py +++ b/jp_proxy_widget/downloader.py @@ -12,7 +12,7 @@ js_file = "js/Filesaver.js" js_helper = """ // function(element) { - debugger; + // debugger; element.download = function(file_name, value, type) { // value can be string or Uint8Array var blob = new Blob([value], {type: type}); @@ -24,7 +24,7 @@ js_helper = """ def load_file_saver(to_proxy_widget, sleep=0.1): w = to_proxy_widget if not hasattr(to_proxy_widget, "saveAs_loaded"): - w.load_js_module("saveAs", js_file) + w.require_js("saveAs", js_file) w.js_init(js_helper) w.flush() # sleep a little bit to allow javascript interpreter to sync
renamed method, now require_js
py
diff --git a/proso_flashcards/management/commands/load_flashcards.py b/proso_flashcards/management/commands/load_flashcards.py index <HASH>..<HASH> 100644 --- a/proso_flashcards/management/commands/load_flashcards.py +++ b/proso_flashcards/management/commands/load_flashcards.py @@ -39,14 +39,14 @@ class Command(BaseCommand): def handle(self, *args, **options): with open(os.path.join(os.path.dirname(os.path.realpath(__file__)), "schema.json"), "r") as schema_file: - schema = json.load(schema_file, 'utf-8') + schema = json.load(schema_file) if len(args) < 1: raise CommandError( "Not enough arguments. One argument required: " + " <file> JSON file containing questions") with open(args[0], 'r') as json_file: with transaction.atomic(): - data = json.load(json_file, 'utf-8') + data = json.load(json_file) validate(data, schema) if "categories" in data: self._load_categories(data["categories"])
drop encoding from loading of json file
py
diff --git a/gdown/cli.py b/gdown/cli.py index <HASH>..<HASH> 100644 --- a/gdown/cli.py +++ b/gdown/cli.py @@ -60,7 +60,7 @@ def main(): parser.add_argument( "url_or_id", help="url or file/folder id (with --id) to download from" ) - parser.add_argument("-O", "--output", help="output filename") + parser.add_argument("-O", "--output", help="output file name / path") parser.add_argument( "-q", "--quiet", action="store_true", help="suppress standard output" ) @@ -130,9 +130,11 @@ def main(): if args.folder: download_folder( url=url, + output=args.output, quiet=args.quiet, proxy=args.proxy, speed=args.speed, + use_cookies=not args.no_cookies, ) return
Add support for directory structure in CLI
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -34,6 +34,7 @@ setup( install_requires=REQUIRES, extras_require=EXTRAS, packages=find_packages(exclude=["tests", "tests.*"]), + include_package_data=True, python_requires=">=3.5.3", entry_points={"console_scripts": ["pymysensors = mysensors.cli:cli"]}, keywords=["sensor", "actuator", "IoT", "DYI"],
Fix package data for wheels (#<I>)
py
diff --git a/rage.py b/rage.py index <HASH>..<HASH> 100644 --- a/rage.py +++ b/rage.py @@ -182,7 +182,7 @@ class RegistryKey(object): @property def path(self): - return self._path + return self._path.strip(os.path.sep) @property def key(self): @@ -243,6 +243,16 @@ class RegistryKey(object): for index in xrange(subkeys): yield self._enum_key(index) + def get_parent_key(self): + path = self.path + + try: + parent, current = path.rstrip(os.path.sep).rsplit(os.path.sep, 1) + except: + raise ValueError("No parent key.") + + return RegistryKey(key=parent) + def _parse_key(self, key, subkey): if isinstance(key, RegistryKey): return key.key, subkey @@ -290,4 +300,4 @@ if __name__ == '__main__': for name, value in key.values: print name, value - key.add_subkey("Tamir") \ No newline at end of file + print key["Tamir"].get_parent_key() \ No newline at end of file
Added option to get parent key. closes #2
py
diff --git a/openquake/calculators/classical.py b/openquake/calculators/classical.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/classical.py +++ b/openquake/calculators/classical.py @@ -158,7 +158,7 @@ class PSHACalculator(base.HazardCalculator): oq = self.oqparam opt = self.oqparam.optimize_same_id_sources num_tiles = math.ceil(len(self.sitecol) / oq.sites_per_tile) - tasks_per_tile = math.ceil(oq.concurrent_tasks / math.sqrt(num_tiles)) + tasks_per_tile = math.ceil(oq.concurrent_tasks / num_tiles) if num_tiles > 1: tiles = self.sitecol.split_in_tiles(num_tiles) else:
Reduced tasks per tile [skip hazardlib] Former-commit-id: <I>a<I>f8bbdf8e9e7de<I>e<I>d<I>f<I>d1eaca
py
diff --git a/geoviews/plotting/bokeh/plot.py b/geoviews/plotting/bokeh/plot.py index <HASH>..<HASH> 100644 --- a/geoviews/plotting/bokeh/plot.py +++ b/geoviews/plotting/bokeh/plot.py @@ -65,7 +65,7 @@ class GeoPlot(ProjectionPlot, ElementPlot): self.show_frame = False show_bounds = self._traverse_options(element, 'plot', ['show_bounds'], defaults=False) - self.show_bounds = not any(not sb for sb in show_bounds['show_bounds']) + self.show_bounds = not any(not sb for sb in show_bounds.get('show_bounds', [])) if self.show_grid: param.main.warning( 'Grid lines do not reflect {0}; to do so '
Fixed show_bounds issue with latest HoloViews (#<I>)
py
diff --git a/salt/proxy/philips_hue.py b/salt/proxy/philips_hue.py index <HASH>..<HASH> 100644 --- a/salt/proxy/philips_hue.py +++ b/salt/proxy/philips_hue.py @@ -99,7 +99,7 @@ def shutdown(opts, *args, **kw): return True -def _set(lamp_id, state): +def _set(lamp_id, state, method="state"): ''' Set state to the device by ID. @@ -107,8 +107,8 @@ def _set(lamp_id, state): :param state: :return: ''' - res = json.loads(requests.put(CONFIG['url']+"/lights/" - + str(lamp_id) + "/state", json=state).content) + url = "{0}/lights/{1}".format(CONFIG['url'], lamp_id) + (method and "/{0}".format(method) or '') + res = json.loads(requests.put(url, json=state).content) res = len(res) > 1 and res[-1] or res[0] if res.get('success'): res = {'result': True}
Enhance _set method so it can set more than just lights status
py
diff --git a/dallinger/recruiters.py b/dallinger/recruiters.py index <HASH>..<HASH> 100644 --- a/dallinger/recruiters.py +++ b/dallinger/recruiters.py @@ -487,6 +487,10 @@ def from_config(config): if name is not None: klass = by_name(name) + # Don't use a configured recruiter in replay mode + if config.get('replay', None): + return HotAirRecruiter() + # Special case 1: may run BotRecruiter in any mode (debug or not), # so it trumps everything else: if klass is BotRecruiter:
Don't use a recruiter when in replay mode.
py
diff --git a/py3status/py3.py b/py3status/py3.py index <HASH>..<HASH> 100644 --- a/py3status/py3.py +++ b/py3status/py3.py @@ -857,8 +857,8 @@ class Py3: An Exception is raised if an error occurs """ - # convert the command to sequence if a string - if isinstance(command, basestring): + # convert the non-shell command to sequence if it is a string + if not shell and isinstance(command, basestring): command = shlex.split(command) try: process = Popen(command, stdout=PIPE, stderr=PIPE, close_fds=True,
Do not split shell commands into sequences The modules like `xrandr_rotate` are totally broken, because since `cmd` is a string, py3.command_output() will `shlex.split` it, and the command becomes invalid. The solution is to not split shell commands.
py
diff --git a/bin/chardetect.py b/bin/chardetect.py index <HASH>..<HASH> 100755 --- a/bin/chardetect.py +++ b/bin/chardetect.py @@ -32,7 +32,7 @@ def description_of(path): def main(): for path in argv[1:]: - print description_of(path) + print(description_of(path)) if __name__ == '__main__':
Minor defect in chardetect.
py
diff --git a/tools/interop_matrix/client_matrix.py b/tools/interop_matrix/client_matrix.py index <HASH>..<HASH> 100644 --- a/tools/interop_matrix/client_matrix.py +++ b/tools/interop_matrix/client_matrix.py @@ -184,6 +184,9 @@ LANG_RELEASE_MATRIX = { { 'v1.14.0': None }, + { + 'v1.15.0': None + }, ], 'python': [ {
Add <I> release of grpc-java
py
diff --git a/tests/featureWriters/featureWriters_test.py b/tests/featureWriters/featureWriters_test.py index <HASH>..<HASH> 100644 --- a/tests/featureWriters/featureWriters_test.py +++ b/tests/featureWriters/featureWriters_test.py @@ -11,10 +11,35 @@ from ufo2ft.featureWriters import ( loadFeatureWriters, loadFeatureWriterFromString, ) + +try: + from plistlib import loads, FMT_XML + def readPlistFromString(s): + return loads(s, fmt=FMT_XML) +except ImportError: + from plistlib import readPlistFromString + import pytest from ..testSupport import _TempModule +TEST_LIB_PLIST = readPlistFromString(""" +<dict> + <key>com.github.googlei18n.ufo2ft.featureWriters</key> + <array> + <dict> + <key>class</key> + <string>KernFeatureWriter</string> + <key>options</key> + <dict> + <key>mode</key> + <string>skip</string> + </dict> + </dict> + </array> +</dict> +""".encode("utf-8")) + class FooBarWriter(BaseFeatureWriter): tableTag = "GSUB" @@ -49,6 +74,7 @@ VALID_SPEC_LISTS = [ "options": {"a": 1}, } ], + TEST_LIB_PLIST[FEATURE_WRITERS_KEY], ]
featureWriters_test: add example plist
py
diff --git a/zsl/interface/celery/worker.py b/zsl/interface/celery/worker.py index <HASH>..<HASH> 100644 --- a/zsl/interface/celery/worker.py +++ b/zsl/interface/celery/worker.py @@ -18,8 +18,8 @@ class CeleryTaskQueueWorkerBase(TaskQueueWorker): # type: (Config) -> None super(CeleryTaskQueueWorkerBase, self).__init__() - self.celery_app = Celery('zsl', backend='rpc', broker='redis://localhost') - self.celery_app.config_from_object(config.get('CELERY_CONFIG')) + self.celery_app = Celery() + self.celery_app.config_from_object(config['CELERY']) def execute_celery_task(self, job_data): job = Job(job_data)
Removed hard coded configuration values I had some settings for Celery hardcoded. I moved them to application settings.
py
diff --git a/exchangelib/folders.py b/exchangelib/folders.py index <HASH>..<HASH> 100644 --- a/exchangelib/folders.py +++ b/exchangelib/folders.py @@ -417,7 +417,7 @@ class Item(EWSElement): ITEM_FIELDS = { 'item_id': ('Id', str), 'changekey': ('ChangeKey', str), - 'mime_content': ('MimeContent', str), + # 'mime_content': ('MimeContent', str), 'sensitivity': ('Sensitivity', Choice), 'importance': ('Importance', Choice), 'is_draft': ('IsDraft', bool), @@ -445,7 +445,7 @@ class Item(EWSElement): # Item fields that are necessary to create an item REQUIRED_FIELDS = {'sensitivity', 'importance', 'reminder_is_set'} # Fields that are read-only in Exchange. Put mime_content here until it's properly supported - READONLY_FIELDS = {'is_draft', 'mime_content'} + READONLY_FIELDS = {'is_draft'} __slots__ = tuple(ITEM_FIELDS) + tuple(EXTRA_ITEM_FIELDS)
Remove mime_type support until we actually find it useful. It's a lot of needless data to fetch by default
py
diff --git a/HARK/distribution.py b/HARK/distribution.py index <HASH>..<HASH> 100644 --- a/HARK/distribution.py +++ b/HARK/distribution.py @@ -1031,9 +1031,9 @@ def calcExpectation(func,values,dstn): temp_shape = value_shape.copy() temp_shape[i] = 1 new_array = np.tile(new_array, temp_shape) - new_array = new_array[:,np.new_axis] # Add dimension for shocks + new_array = new_array[:,np.newaxis] # Add dimension for shocks new_array = np.tile(new_array, shock_tiling_shape) - args_list.append(args_list) + args_list.append(new_array) # Just add a dimension for the shocks else:
Fixed typos, basic evalExpectations works Still need to test alternate input versions.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -11,7 +11,7 @@ from distutils.core import Command from setuptools import setup, find_packages from version import get_git_version -VERSION = get_git_version() +VERSION, SOURCE_LABEL = get_git_version() PROJECT = 'streamcorpus_pipeline' AUTHOR = 'Diffeo, Inc.' AUTHOR_EMAIL = '[email protected]' @@ -110,6 +110,7 @@ setup( version=VERSION, description=DESC, license='MIT/X11 license http://opensource.org/licenses/MIT', + source_label=SOURCE_LABEL, #long_description=read_file('README.md'), long_description="", author=AUTHOR, @@ -143,7 +144,7 @@ setup( 'jellyfish', 'nilsimsa>=0.2', 'pytest', ## required in .rpm - 'pycassa', + 'pycassa', 'chromium_compact_language_detector', 'pytest', 'pytest-capturelog',
Update setup.py to new version.py api
py
diff --git a/rosbag_pandas.py b/rosbag_pandas.py index <HASH>..<HASH> 100755 --- a/rosbag_pandas.py +++ b/rosbag_pandas.py @@ -14,7 +14,7 @@ import rospy from roslib.message import get_message_class -def bag_to_dataframe(bag_name, include=None, exclude=None, parse_header=True): +def bag_to_dataframe(bag_name, include=None, exclude=None, parse_header=False): ''' Read in a rosbag file and create a pandas data frame that is indexed by the time the message was recorded in the bag. @@ -253,5 +253,17 @@ def get_key_name(name): return name +def clean_for_export(name): + for c, t in df.dtypes.iteritems(): + if t.kind in 'OSUV': + df[c] = df[c].apply(func=str) + df[c] = df[c].str.replace('\n', '') + df[c] = df[c].str.replace('\r', '') + df[c] = df[c].str.replace(',','\t') + return df + + + + if __name__ == '__main__': print 'hello'
Changed default to not parse headers and added cleaning script for CSV export.
py
diff --git a/klue_microservice/exceptions.py b/klue_microservice/exceptions.py index <HASH>..<HASH> 100644 --- a/klue_microservice/exceptions.py +++ b/klue_microservice/exceptions.py @@ -11,7 +11,7 @@ from klue.swagger.apipool import ApiPool log = logging.getLogger(__name__) -class KlueMicroServiceException(Exception): +class KlueMicroServiceException(KlueException): code = 'UNKNOWN_EXCEPTION' status = 500 error_id = None @@ -67,6 +67,10 @@ class UnhandledServerError(KlueMicroServiceException): code = 'UNHANDLED_SERVER_ERROR' status = 500 +class InternalServerError(KlueMicroServiceException): + code = 'SERVER_ERROR' + status = 500 + class AuthMissingHeaderError(KlueMicroServiceException): code = 'AUTHORIZATION_HEADER_MISSING' status = 401 @@ -108,6 +112,10 @@ def is_error(o): return False +def report_error(data, msg=None, caught=None, title=None): + return crash.report_error(data, msg=msg, caught=caught, title=title) + + def format_error(e): """Take an exception caught within klue-client-server and turn it into a bravado-core Error instance"""
Added report_error and missing exceptions
py
diff --git a/mysql/toolkit/components/database.py b/mysql/toolkit/components/database.py index <HASH>..<HASH> 100644 --- a/mysql/toolkit/components/database.py +++ b/mysql/toolkit/components/database.py @@ -171,14 +171,14 @@ class Database(DatabaseCopy): if not one_query: self.copy_database_slow(source, destination, optimized) else: - self.copy_tables_onequery(source, destination) + self._copy_tables_onequery(source, destination) def create_database(self, name): """Create a new database.""" statement = "CREATE DATABASE " + wrap(name) + " DEFAULT CHARACTER SET latin1 COLLATE latin1_swedish_ci" return self.execute(statement) - def copy_tables_onequery(self, source, destination, tables=None, primary_keys=True): + def _copy_tables_onequery(self, source, destination, tables=None, primary_keys=True): """Copy all tables in a DB by executing CREATE TABLE, SELECT and INSERT INTO statements all in one query.""" # Change database to source self.change_db(source)
Refactored copy_tables_onequery to a private method
py
diff --git a/discord/message.py b/discord/message.py index <HASH>..<HASH> 100644 --- a/discord/message.py +++ b/discord/message.py @@ -95,9 +95,10 @@ class Attachment: use_cached: :class:`bool` Whether to use :attr:`proxy_url` rather than :attr:`url` when downloading the attachment. This will allow attachments to be saved after deletion - more often, which is generally deleted right after the message is deleted. - Note that this can still fail to download deleted attachments if too much time - has passed. + more often, compared to the regular URL is generally deleted right after + the message is deleted. Note that this can still fail to download + deleted attachments if too much time has passed and it does not work + on some type of attachments. Raises --------
Fix Attachment.save wording nit.
py
diff --git a/tower/management/commands/extract.py b/tower/management/commands/extract.py index <HASH>..<HASH> 100644 --- a/tower/management/commands/extract.py +++ b/tower/management/commands/extract.py @@ -138,6 +138,10 @@ class Command(BaseCommand): dest='outputdir', help='The directory where extracted files will be placed. ' '(Default: %default)'), + make_option('-c', '--create', + action='store_true', dest='create', default=False, + help='Create output-dir if missing'), + ) def handle(self, *args, **options): @@ -145,9 +149,13 @@ class Command(BaseCommand): outputdir = os.path.abspath(options.get('outputdir')) if not os.path.isdir(outputdir): - print ("Output directory must exist (%s). " - "Specify one with --output-dir" % outputdir) - return "FAILURE\n" + if not options.get('create'): + print ("Output directory must exist (%s) unless -c option is " + "given. " + "Specify one with --output-dir" % outputdir) + return "FAILURE\n" + else: + os.makedirs(outputdir) if domains == "all": domains = settings.DOMAIN_METHODS.keys()
Extract now takes a '-c' option. '-c' will create the locale/ directory so that extract can do its thing.
py
diff --git a/umis/umis.py b/umis/umis.py index <HASH>..<HASH> 100644 --- a/umis/umis.py +++ b/umis/umis.py @@ -376,21 +376,6 @@ def umi_histogram(fastq): for bc, count in counter.most_common(): sys.stdout.write('{}\t{}\n'.format(bc, count)) -def cb_filterer(chunk, bc1, bc2): - parser_re = re.compile('(.*):CELL_(?P<CB>.*):UMI_(.*)\\n(.*)\\n\\+\\n(.*)\\n') - kept = [] - for read in chunk: - match = parser_re.search(read).groupdict() - cb1 = match['CB'] - if bc2: - cb1, cb2 = cb1.split("-") - if cb1 not in bc1: - continue - if bc2 and cb2 not in bc2: - continue - kept.append(read) - return kept - def get_cb_depth_set(cb_histogram, cb_cutoff): ''' Returns a set of barcodes with a minimum number of reads '''
Remove unused cb_filterer function. This was moved to barcodes.py and renamed to exact_barcode_filter.
py
diff --git a/spikeextractors/SortingExtractor.py b/spikeextractors/SortingExtractor.py index <HASH>..<HASH> 100644 --- a/spikeextractors/SortingExtractor.py +++ b/spikeextractors/SortingExtractor.py @@ -77,7 +77,7 @@ class SortingExtractor(ABC): if unit_id not in self._unit_features.keys(): self._unit_features[unit_id] = {} if isinstance(feature_name, str) and len(value) == len(self.getUnitSpikeTrain(unit_id)): - self._unit_features[unit_id][feature_name] = value + self._unit_features[unit_id][feature_name] = np.asarray(value) else: if not isinstance(feature_name, str): raise ValueError("feature_name must be a string")
Wrapped value in numpy array for spike features --> easier indexing in future
py
diff --git a/fedora_messaging/api.py b/fedora_messaging/api.py index <HASH>..<HASH> 100644 --- a/fedora_messaging/api.py +++ b/fedora_messaging/api.py @@ -1,8 +1,6 @@ """The API for publishing messages and consuming from message queues.""" -from ._session import ConsumerSession, PublisherSession - -_session = None +from . import _session from .signals import pre_publish_signal, publish_signal, publish_failed_signal from .message import Message @@ -16,6 +14,8 @@ __all__ = ( 'publish_failed_signal', ) +_session_cache = None + def consume(callback, bindings=None): """ @@ -44,7 +44,7 @@ def consume(callback, bindings=None): """ if isinstance(bindings, dict): bindings = [bindings] - session = ConsumerSession() + session = _session.ConsumerSession() session.consume(callback, bindings) @@ -67,7 +67,7 @@ def publish(message): # TODO doc retry behavior, when messages could get double published, etc. """ # TODO make thread-local registry, probably - global _session - if _session is None: - _session = PublisherSession() - _session.publish(message) + global _session_cache + if _session_cache is None: + _session_cache = _session.PublisherSession(exchange=exchange) + _session_cache.publish(message)
Don't expose _session classes as public names in api.py
py
diff --git a/openid/fetchers.py b/openid/fetchers.py index <HASH>..<HASH> 100644 --- a/openid/fetchers.py +++ b/openid/fetchers.py @@ -218,8 +218,10 @@ class Urllib2Fetcher(HTTPFetcher): 'User-Agent', "%s Python-urllib/%s" % (USER_AGENT, urllib.request.__version__)) - req = urllib.request.Request(url, data=bytes(body, encoding="utf-8"), - headers=headers) + if isinstance(body, str): + body = bytes(body, encoding="utf-8") + + req = urllib.request.Request(url, data=body, headers=headers) try: f = self.urlopen(req) try:
Only coerce to bytes if it's str to begin with
py
diff --git a/psiturk/psiturk_shell.py b/psiturk/psiturk_shell.py index <HASH>..<HASH> 100644 --- a/psiturk/psiturk_shell.py +++ b/psiturk/psiturk_shell.py @@ -191,12 +191,10 @@ class PsiturkShell(Cmd, object): for i in range(readline.get_current_history_length()): if readline.get_history_item(i) is not None: self.history.append(readline.get_history_item(i)) - Cmd.preloop(self) def postloop(self): ''' Save history on exit. ''' readline.write_history_file('.psiturk_history') - Cmd.postloop(self) def onecmd_plus_hooks(self, line): ''' Trigger hooks after command. '''
Remove calls to parent stub functions. Each of the removed calls was just a stub hook function in the parent module; no need to call them.
py
diff --git a/tests/test_unicorn.py b/tests/test_unicorn.py index <HASH>..<HASH> 100644 --- a/tests/test_unicorn.py +++ b/tests/test_unicorn.py @@ -128,8 +128,8 @@ def _compare_paths(pu, pn): def run_similarity(binpath, depth): b = angr.Project(os.path.join(test_location, binpath)) - s_unicorn = b.factory.entry_state(add_options=so.unicorn, remove_options={so.LAZY_SOLVES}) # unicorn - s_normal = b.factory.entry_state(add_options={so.INITIALIZE_ZERO_REGISTERS}, remove_options={so.LAZY_SOLVES}) # normal + s_unicorn = b.factory.entry_state(add_options=so.unicorn, remove_options={so.LAZY_SOLVES, so.TRACK_MEMORY_MAPPING}) # unicorn + s_normal = b.factory.entry_state(add_options={so.INITIALIZE_ZERO_REGISTERS}, remove_options={so.LAZY_SOLVES, so.TRACK_MEMORY_MAPPING}) # normal p_unicorn = b.factory.path(s_unicorn) p_normal = b.factory.path(s_normal) pg = b.factory.path_group(p_unicorn)
use the new option to disable memory map tracking
py
diff --git a/sphinx_gallery/gen_rst.py b/sphinx_gallery/gen_rst.py index <HASH>..<HASH> 100644 --- a/sphinx_gallery/gen_rst.py +++ b/sphinx_gallery/gen_rst.py @@ -438,14 +438,16 @@ def execute_script(code_block, example_globals, image_path, fig_count, # Depending on whether we have one or more figures, we're using a # horizontal list or a single rst call to 'image'. + image_list = "" if len(figure_list) == 1: figure_name = figure_list[0] image_list = SINGLE_IMAGE % figure_name.lstrip('/') - else: + elif len(figure_list) > 1: image_list = HLIST_HEADER for figure_name in figure_list: image_list += HLIST_IMAGE_TEMPLATE % figure_name.lstrip('/') + except Exception: figure_list = [] image_list = '%s is not compiling:' % src_file
Don't put the CSS horizontal list when there are no images. If the code outputs no images, there is no good reason to use the multiple-images list CSS environment.
py
diff --git a/ayrton/tests/test_ayrton.py b/ayrton/tests/test_ayrton.py index <HASH>..<HASH> 100644 --- a/ayrton/tests/test_ayrton.py +++ b/ayrton/tests/test_ayrton.py @@ -106,6 +106,39 @@ class Bash(unittest.TestCase): self.assertEqual (bash ('~', single=True), os.environ['HOME']) +class Argv (unittest.TestCase): + + def testEmpty (self): + self.assertRaises (ValueError, ayrton.Argv, []) + + + def testIter (self): + data= ['foo', 'bar', 'baz'] + argv= ayrton.Argv (data) + + args= list (iter (argv)) + + self.assertEqual (args, data[1:]) + + + def testLen (self): + data= ['foo', 'bar', 'baz'] + argv= ayrton.Argv (data) + + l= len (argv) + + self.assertEqual (l, len (data)-1) + + + def testPopFirst (self): + data= ['foo', 'bar', 'baz'] + argv= ayrton.Argv (data) + + i= argv.pop () + + self.assertEqual (i, data[1]) + + class ScriptExecution (unittest.TestCase): def setUp (self):
[+] tests for Argv, improve coverage.
py
diff --git a/test/test_api/test_text.py b/test/test_api/test_text.py index <HASH>..<HASH> 100644 --- a/test/test_api/test_text.py +++ b/test/test_api/test_text.py @@ -143,7 +143,7 @@ def test_save_file(editor): assert editor.file.encoding == 'latin-1' os.remove('tmp.py') editor.file.open(__file__) - with pytest.raises(OSError): + with pytest.raises(IOError): editor.file.save(path='/usr/bin') is False editor.file._path = '' editor.file.save() is False
Tests: fix expected exception type (IOError instead of OSError)
py
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index <HASH>..<HASH> 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -540,7 +540,7 @@ def verify(*package): return ret -def grouplist(): +def group_list(): ''' Lists all groups known by yum on this system @@ -563,7 +563,7 @@ def grouplist(): return ret -def groupinfo(groupname): +def group_info(groupname): ''' Lists packages belonging to a certain group
Multi-word, unspaced function names are ugly
py
diff --git a/HydraLib/python/HydraLib/hydra_dateutil.py b/HydraLib/python/HydraLib/hydra_dateutil.py index <HASH>..<HASH> 100644 --- a/HydraLib/python/HydraLib/hydra_dateutil.py +++ b/HydraLib/python/HydraLib/hydra_dateutil.py @@ -362,7 +362,7 @@ def reindex_timeseries(ts_string, new_timestamps): if set(idx.year) == set([int(seasonal_year)]): if isinstance(new_timestamps, list): seasonal_timestamp = [] - for t in idx: + for t in ts_timestamps: t_1900 = t.replace(year=int(seasonal_year)) seasonal_timestamp.append(t_1900) ts_timestamps = seasonal_timestamp
Fix for reindex timestamps of seasonal timeseries.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -104,7 +104,7 @@ setup(name='km3pipe', cmdclass={'build_ext': build_ext}, include_package_data=True, platforms='any', - setup_requires=['setuptools>=18.0', 'cython', 'numpy'], + setup_requires=['setuptools>=24.3', 'pip>=9.0.1', 'cython', 'numpy'], install_requires=['cython', 'docopt', 'numpy>=1.12', 'pandas', 'pytz', 'six', ], extras_require=require_groups,
PKG bump pip + setuptools (yes I'm that lame)
py
diff --git a/izaber/__init__.py b/izaber/__init__.py index <HASH>..<HASH> 100644 --- a/izaber/__init__.py +++ b/izaber/__init__.py @@ -4,7 +4,7 @@ import imp class IZaberFinder(object): - def find_module(self, module_name, package_path): + def find_module(self, module_name, package_path=None): elements = module_name.split('.') if elements[0] != 'izaber': return @@ -14,14 +14,17 @@ class IZaberFinder(object): # izaber_extensionname # # where extensionname is all alphabetical - elements[1] = "izaber_"+elements[1] - found = None - path = None - for e in elements[1:]: - found = imp.find_module(e,path) - path = [found[1]] - - return IZaberLoader(*found) + try: + elements[1] = "izaber_"+elements[1] + found = None + package_path = None + for e in elements[1:]: + found = imp.find_module(e,package_path) + package_path = [found[1]] + + return IZaberLoader(*found) + except ImportError: + return class IZaberLoader(object):
Fix to make sure it does not clash with Flask.
py
diff --git a/openquake/engine/engine.py b/openquake/engine/engine.py index <HASH>..<HASH> 100644 --- a/openquake/engine/engine.py +++ b/openquake/engine/engine.py @@ -342,7 +342,8 @@ def run_calc(job, log_level, log_file, exports, job_type): if not job_pid: # calculation executor process try: - logs.init_logs_amqp_send(level=log_level, job_id=job.id) + logs.init_logs_amqp_send(level=log_level, calc_domain=job_type, + calc_id=job.calculation.id) # run the job job.is_running = True job.save()
engine: Update call to logs.init_logs_amqp_send; there's a new func signature. Former-commit-id: <I>b<I>d<I>ab0b1eda<I>b0b<I>fc4bcd<I>eaad
py
diff --git a/app_namespace/loader.py b/app_namespace/loader.py index <HASH>..<HASH> 100644 --- a/app_namespace/loader.py +++ b/app_namespace/loader.py @@ -13,6 +13,8 @@ from django.template.base import TemplateDoesNotExist from django.core.exceptions import ImproperlyConfigured from django.utils.datastructures import SortedDict # Deprecated in Django 1.9 +FS_ENCODING = sys.getfilesystemencoding() or sys.getdefaultencoding() + class Loader(BaseLoader): """ @@ -29,9 +31,6 @@ class Loader(BaseLoader): """ app_templates_dirs = SortedDict() for app in settings.INSTALLED_APPS: - if not six.PY3: - fs_encoding = (sys.getfilesystemencoding() or - sys.getdefaultencoding()) try: mod = import_module(app) except ImportError as e: # pragma: no cover @@ -42,7 +41,7 @@ class Loader(BaseLoader): 'templates') if os.path.isdir(templates_dir): if not six.PY3: - templates_dir = templates_dir.decode(fs_encoding) + templates_dir = templates_dir.decode(FS_ENCODING) app_templates_dirs[app] = templates_dir if '.' in app: app_templates_dirs[app.split('.')[-1]] = templates_dir
Compute the filesystem encoding one time at the loading
py
diff --git a/tensor2tensor/utils/bleu_hook.py b/tensor2tensor/utils/bleu_hook.py index <HASH>..<HASH> 100644 --- a/tensor2tensor/utils/bleu_hook.py +++ b/tensor2tensor/utils/bleu_hook.py @@ -115,7 +115,12 @@ def compute_bleu(reference_corpus, if use_bp: ratio = translation_length / reference_length - bp = math.exp(1 - 1. / ratio) if ratio < 1.0 else 1.0 + if ratio <= 0.0: + bp = 0.0 + elif ratio >= 1.0: + bp = 1.0 + else: + bp = math.exp(1 - 1. / ratio) bleu = geo_mean * bp return np.float32(bleu)
Division by zero bug fix. PiperOrigin-RevId: <I>
py
diff --git a/anchore/cli/login.py b/anchore/cli/login.py index <HASH>..<HASH> 100644 --- a/anchore/cli/login.py +++ b/anchore/cli/login.py @@ -18,8 +18,18 @@ def login(anchore_config): ecode = 0 try: - username = raw_input("Username: ") - password = getpass.getpass("Password: ") + if os.getenv('ANCHOREUSER'): + anchore_print("Using user from environment (ANCHOREUSER)") + username = os.getenv('ANCHOREUSER') + else: + username = raw_input("Username: ") + + if os.getenv('ANCHOREPASS'): + anchore_print("Using password from environment (ANCHOREPASS)") + password = os.getenv('ANCHOREPASS') + else: + password = getpass.getpass("Password: ") + aa = contexts['anchore_auth'] new_anchore_auth = anchore_auth.anchore_auth_init(username, password, aa['auth_file'], aa['client_info_url'], aa['token_url'], aa['conn_timeout'], aa['max_retries'])
allow ANCHOREUSER/ANCHOREPASS environment override for login operation
py
diff --git a/hangups/client.py b/hangups/client.py index <HASH>..<HASH> 100644 --- a/hangups/client.py +++ b/hangups/client.py @@ -249,7 +249,8 @@ class Client(object): initial_entities = [] try: entities = schemas.INITIAL_CLIENT_ENTITIES.parse( - data_dict['ds:21'][0] + #data_dict['ds:21'][0] + data_dict['ds:37'][0] ) except ValueError as e: logger.warning('Failed to parse initial client entities: {}'
fixes #<I> by re-mapping ds:<I> to ds:<I>
py
diff --git a/asciimathml.py b/asciimathml.py index <HASH>..<HASH> 100644 --- a/asciimathml.py +++ b/asciimathml.py @@ -26,7 +26,7 @@ def El(tag, text=None, *children, **attrib): element = Element_(tag, **attrib) if not text is None: - if isinstance(text, basestring): + if isinstance(text, str): element.text = AtomicString_(text) else: children = (text, ) + children @@ -571,7 +571,7 @@ if __name__ == '__main__': else: element = Element - print """\ + print("""\ <?xml version="1.0"?> <html xmlns="http://www.w3.org/1999/xhtml"> <head> @@ -579,9 +579,9 @@ if __name__ == '__main__': <title>ASCIIMathML preview</title> </head> <body> -""" - print tostring(parse(' '.join(args), element)) - print """\ +""") + print(tostring(parse(' '.join(args), element))) + print("""\ </body> </html> -""" +""")
Port this to python 3.x.
py
diff --git a/tests/setup_transaction_tests.py b/tests/setup_transaction_tests.py index <HASH>..<HASH> 100644 --- a/tests/setup_transaction_tests.py +++ b/tests/setup_transaction_tests.py @@ -105,7 +105,7 @@ curve_order = 218882428718392752222464057452572750885483644004160343436982041865 @pytest.fixture def get_log(): - def get_log(chain, contract, event_name): + def get_log(tester, contract, event_name): event_ids_w_name = [k for k, v in \ contract.translator.event_data.items() if v["name"] == event_name] assert len(event_ids_w_name) == 1, \ @@ -113,7 +113,7 @@ def get_log(): event_id = event_ids_w_name[0] # Get the last logged event - logs = chain.head_state.receipts[-1].logs[-1] + logs = tester.s.head_state.receipts[-1].logs[-1] # Ensure it has the event we are looking to decode assert logs.address == contract.address, \
Provide tester instead of chain as arg
py
diff --git a/pywb/apps/frontendapp.py b/pywb/apps/frontendapp.py index <HASH>..<HASH> 100644 --- a/pywb/apps/frontendapp.py +++ b/pywb/apps/frontendapp.py @@ -91,8 +91,6 @@ class FrontEndApp(object): self.cdx_api_endpoint = config.get('cdx_api_endpoint', '/cdx') - self._init_routes() - upstream_paths = self.get_upstream_paths(self.warcserver_server.port) framed_replay = config.get('framed_replay', True) @@ -106,6 +104,8 @@ class FrontEndApp(object): metadata_templ = os.path.join(self.warcserver.root_dir, '{coll}', 'metadata.yaml') self.metadata_cache = MetadataCache(metadata_templ) + self._init_routes() + def _init_routes(self): """Initialize the routes and based on the configuration file makes available specific routes (proxy mode, record)""" @@ -500,6 +500,11 @@ class FrontEndApp(object): # store original script_name (original prefix) before modifications are made environ['pywb.app_prefix'] = environ.get('SCRIPT_NAME', '') + lang = args.pop('lang', '') + if lang: + pop_path_info(environ) + environ['pywb_lang'] = lang + response = endpoint(environ, **args) except HTTPException as hte:
routes: make coll route config extendable to support prefix routing for localization ukwa/ukwa-pywb#<I> split init_routes() into init_coll_routes() and make_coll_routes() which retrieves a list of per-collection routes only
py
diff --git a/anyconfig/globals.py b/anyconfig/globals.py index <HASH>..<HASH> 100644 --- a/anyconfig/globals.py +++ b/anyconfig/globals.py @@ -1,5 +1,6 @@ # # Copyright (C) 2013 - 2018 Satoru SATOH <ssato @ redhat.com> +# Copyright (C) 2019 Satoru SATOH <satoru.satoh @ gmail.com> # License: MIT # # pylint: disable=invalid-name
fix: correct the copyright header in .globals
py
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py b/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py index <HASH>..<HASH> 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/manifest_validator/v2/migration.py @@ -63,7 +63,7 @@ V2_TO_V1_MAP = JSONDict( OS_TO_CLASSIFIER_TAGS = { "linux": "Supported OS::Linux", - "mac_os": "Supported OS::Mac OS", + "mac_os": "Supported OS::macOS", "windows": "Supported OS::Windows", }
Fix manifest migration of macOS tag (#<I>)
py
diff --git a/asammdf/blocks/mdf_v4.py b/asammdf/blocks/mdf_v4.py index <HASH>..<HASH> 100644 --- a/asammdf/blocks/mdf_v4.py +++ b/asammdf/blocks/mdf_v4.py @@ -3683,8 +3683,6 @@ class MDF4(object): # for embedded attachments extrat data and create new files if flags & v4c.FLAG_AT_EMBEDDED: data = attachment.extract() - - return data, file_path else: # for external attachments read the file and return the content if flags & v4c.FLAG_AT_MD5_VALID: @@ -3697,7 +3695,6 @@ class MDF4(object): if attachment.mime.startswith("text"): with open(file_path, "r") as f: data = f.read() - return data, file_path else: message = ( f'ATBLOCK md5sum="{attachment["md5_sum"]}" ' @@ -3713,12 +3710,15 @@ class MDF4(object): with open(file_path, mode) as f: file_path = Path(f"FROM_{file_path}") data = f.read() - return data, file_path except Exception as err: os.chdir(current_path) message = "Exception during attachment extraction: " + repr(err) logger.warning(message) - return b"", file_path + data = b"" + finally: + os.chdir(current_path) + + return data, file_path def get( self,
don't change the current working directory
py
diff --git a/master/buildbot/process/build.py b/master/buildbot/process/build.py index <HASH>..<HASH> 100644 --- a/master/buildbot/process/build.py +++ b/master/buildbot/process/build.py @@ -261,8 +261,6 @@ class Build(properties.PropertiesMixin, WorkerAPICompatMixin): ("control", "builds", str(self.buildid), "stop")) - yield self.master.data.updates.generateNewBuildEvent(self.buildid) - self.setupOwnProperties() self.setupWorkerForBuilder(workerforbuilder) @@ -274,9 +272,11 @@ class Build(properties.PropertiesMixin, WorkerAPICompatMixin): metrics.MetricCountEvent.log('active_builds', 1) - yield self.master.data.updates.setBuildStateString(self.buildid, - u'starting') + # make sure properties are available to people listening on 'new' events + yield self._flushProperties(None) self.build_status.buildStarted(self) + yield self.master.data.updates.setBuildStateString(self.buildid, u'starting') + yield self.master.data.updates.generateNewBuildEvent(self.buildid) try: self.setupBuild() # create .steps
Delay sending 'new' build message until properties are populated
py