diff (string, lengths 139-3.65k) | message (string, lengths 8-627) | diff_languages (stringclasses, 1 value)
---|---|---|
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -157,7 +157,8 @@ pygments_style = 'sphinx'
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'alabaster'
+# html_theme = 'alabaster'
+html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
|
switch documentation from alabaster to rtd
|
py
|
diff --git a/scrubadub/__init__.py b/scrubadub/__init__.py
index <HASH>..<HASH> 100644
--- a/scrubadub/__init__.py
+++ b/scrubadub/__init__.py
@@ -8,7 +8,7 @@ from . import detectors
from . import post_processors
from .filth import Filth
-__version__ = VERSION = "2.0.0-rc0"
+__version__ = VERSION = "2.0.0.rc0"
__all__ = [
'Scrubber', 'filth', 'detectors', 'post_processors', 'clean', 'clean_documents', 'list_filth',
'list_filth_documents',
|
use python versioning needed for pypi
|
py
|
diff --git a/stravalib/tests/functional/test_client_rate_limiter.py b/stravalib/tests/functional/test_client_rate_limiter.py
index <HASH>..<HASH> 100644
--- a/stravalib/tests/functional/test_client_rate_limiter.py
+++ b/stravalib/tests/functional/test_client_rate_limiter.py
@@ -18,8 +18,8 @@ class ClientDefaultRateLimiterTest(FunctionalTestBase):
# setup 'short' limit for testing
self.client.protocol.rate_limiter.rules = []
self.client.protocol.rate_limiter.rules.append(XRateLimitRule(
- {'short': {'usageFieldIndex': 0, 'usage': 0, 'limit': 600,
- 'time': 5, 'lastExceeded': None}}))
+ {'short': {'usage': 0, 'limit': 600, 'time': 5, 'lastExceeded': None},
+ 'long': {'usage': 0, 'limit': 30000, 'time': 5, 'lastExceeded': None}}))
# interact with api to get the limits
self.client.get_athlete()
|
Added long rate limit to prevent test from breaking
|
py
|
diff --git a/wagtailmenus/settings/base.py b/wagtailmenus/settings/base.py
index <HASH>..<HASH> 100644
--- a/wagtailmenus/settings/base.py
+++ b/wagtailmenus/settings/base.py
@@ -122,7 +122,6 @@ if DJANGO_VERSION >= (2, 0):
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
- 'compressor.finders.CompressorFinder',
)
WAGTAILMENUS_FLAT_MENUS_HANDLE_CHOICES = (
|
Remove unnecessary line from STATICFILES_FINDERS setting
|
py
|
diff --git a/tests/test_views.py b/tests/test_views.py
index <HASH>..<HASH> 100644
--- a/tests/test_views.py
+++ b/tests/test_views.py
@@ -186,9 +186,10 @@ class RenderSetupTests(TestCase):
def test_generate_data_is_called_once_with_fields_and_queryset(self):
self.mixin.generate_data = mock.MagicMock()
- self.mixin.render_excel_response(queryset='test', fields=self.fields)
- self.mixin.generate_data.assert_called_once_with(queryset='test',
- fields=self.fields)
+ qs = MockModel.objects.all()
+ self.mixin.render_excel_response(queryset=qs, fields=self.fields)
+ self.mixin.generate_data.assert_called_once_with(queryset=qs,
+ fields=self.fields)
def test_if_no_headers_passed_generate_headers_called(self):
self.mixin.render_excel_response(fields=self.fields)
|
Pass a real QuerySet instead of a fake one to render_excel_response. It needs something that it can call .model() on.
|
py
|
diff --git a/coaster/sqlalchemy/statemanager.py b/coaster/sqlalchemy/statemanager.py
index <HASH>..<HASH> 100644
--- a/coaster/sqlalchemy/statemanager.py
+++ b/coaster/sqlalchemy/statemanager.py
@@ -196,6 +196,7 @@ over direct state value changes:
from __future__ import absolute_import
+import collections
from collections import OrderedDict
import functools
from sqlalchemy import and_, or_, column as column_constructor, CheckConstraint
@@ -207,7 +208,7 @@ from .roles import RoleMixin
__all__ = ['StateManager', 'StateTransitionError',
'transition_error', 'transition_before', 'transition_after', 'transition_exception']
-iterables = (set, frozenset, list, tuple) # Used for various isinstance checks
+iterables = (collections.Sequence, collections.Set) # Used for various isinstance checks
# --- Signals -----------------------------------------------------------------
|
Use ABCs for future proofing against set-like and list-like objects
|
py
|
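A quick sketch of why the ABC-based check is more future-proof: any custom container that inherits from (or is registered with) the abstract base classes now passes the `isinstance` test, where the old concrete-type tuple would reject it. Note that on modern Python the ABCs live in `collections.abc` (the bare `collections.Sequence` spelling in the diff was removed in Python 3.10). `FrozenStates` below is a made-up illustration, not a class from coaster:

```python
from collections.abc import Sequence, Set

class FrozenStates(Set):
    """A minimal custom set-like container (illustrative only)."""
    def __init__(self, items):
        self._items = frozenset(items)
    def __contains__(self, item):
        return item in self._items
    def __iter__(self):
        return iter(self._items)
    def __len__(self):
        return len(self._items)

states = FrozenStates({'draft', 'published'})
print(isinstance(states, (set, frozenset, list, tuple)))  # False: old check rejects it
print(isinstance(states, (Sequence, Set)))                # True: ABC check accepts it
# Caveat: str is also a Sequence, so the ABC check accepts strings,
# unlike the old concrete-type tuple.
```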
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -48,7 +48,7 @@ setup(
# copyright
author = 'Andrey Mikhaylenko',
- author_email = '[email protected]',
+ author_email = '[email protected]',
license = 'GNU Lesser General Public License (LGPL), Version 3',
# more info
|
Update author's e-mail.
|
py
|
diff --git a/menu_launcher.py b/menu_launcher.py
index <HASH>..<HASH> 100755
--- a/menu_launcher.py
+++ b/menu_launcher.py
@@ -14,12 +14,13 @@ try:
screen = curses.initscr()
screen.keypad(1)
curses.noecho()
+ h = curses.A_BOLD
+ n = curses.A_NORMAL
# Check if terminal can support color
if curses.has_colors():
curses.start_color()
curses.init_pair(1,curses.COLOR_BLACK, curses.COLOR_WHITE)
h = curses.color_pair(1)
- n = curses.A_NORMAL
except Exception as e:
pass
|
h now has a default value for terminals that don't support color.
|
py
|
diff --git a/sos/plugins/kimchi.py b/sos/plugins/kimchi.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/kimchi.py
+++ b/sos/plugins/kimchi.py
@@ -19,7 +19,6 @@ class Kimchi(Plugin, RedHatPlugin, UbuntuPlugin, DebianPlugin):
packages = ('kimchi',)
def setup(self):
- log_limit = self.get_option('log_size')
self.add_copy_spec('/etc/kimchi/')
if not self.get_option('all_logs'):
self.add_copy_spec('/var/log/kimchi/*.log')
|
[kimchi] Remove unused log-size check. Removes a check against the log-size option that is never used later in the plugin.
|
py
|
diff --git a/centinel/primitives/tls.py b/centinel/primitives/tls.py
index <HASH>..<HASH> 100644
--- a/centinel/primitives/tls.py
+++ b/centinel/primitives/tls.py
@@ -6,7 +6,13 @@ import M2Crypto
def get_fingerprint(host, port):
- cert = ssl.get_server_certificate((host, port))
+ try:
+ cert = ssl.get_server_certificate((host, port))
+ # if this fails, there's a possibility that SSLv3 handshake was
+ # attempted and rejected by the server. Use TLSv1 instead.
+ except ssl.SSLError as exp:
+ cert = ssl.get_server_certificate((host, port), ssl_version=ssl.PROTOCOL_TLSv1)
+
x509 = M2Crypto.X509.load_cert_string(cert, M2Crypto.X509.FORMAT_PEM)
fingerprint = x509.get_fingerprint('sha1')
return fingerprint.lower(), cert
|
added SSLv3 fallback to TLSv1 in TLS primitive, fixes #<I>
|
py
|
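The fallback pattern in this hunk generalizes: attempt the default handshake first and retry with an explicit protocol only on `ssl.SSLError`. A minimal sketch, assuming a reachable host; `PROTOCOL_TLSv1` and the `ssl_version` argument are deprecated on current Python, and the standard library's `hashlib` stands in here for M2Crypto's fingerprint call:

```python
import hashlib
import ssl

def get_fingerprint(host, port=443):
    try:
        pem = ssl.get_server_certificate((host, port))
    except ssl.SSLError:
        # The default handshake was rejected; retry with an explicit protocol.
        pem = ssl.get_server_certificate((host, port),
                                         ssl_version=ssl.PROTOCOL_TLSv1)
    der = ssl.PEM_cert_to_DER_cert(pem)       # PEM text -> DER bytes
    return hashlib.sha1(der).hexdigest(), pem  # lowercase SHA-1 fingerprint
```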
diff --git a/moto/core/responses.py b/moto/core/responses.py
index <HASH>..<HASH> 100644
--- a/moto/core/responses.py
+++ b/moto/core/responses.py
@@ -20,7 +20,6 @@ import six
from six.moves.urllib.parse import parse_qs, urlparse
import xmltodict
-from pkg_resources import resource_filename
from werkzeug.exceptions import HTTPException
import boto3
@@ -766,6 +765,9 @@ class AWSServiceSpec(object):
"""
def __init__(self, path):
+ # Importing pkg_resources takes ~60ms; keep it local
+ from pkg_resources import resource_filename # noqa
+
self.path = resource_filename("botocore", path)
with io.open(self.path, "r", encoding="utf-8") as f:
spec = json.load(f)
|
Keep pkg_resources import function-local (~<I>s)
|
py
|
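The import cost motivating this change is easy to measure yourself; a rough sketch (numbers vary by machine, setuptools version, and filesystem cache state):

```python
import time

start = time.perf_counter()
import pkg_resources  # noqa: F401 -- the import being measured
elapsed_ms = (time.perf_counter() - start) * 1000
print("pkg_resources import took %.0f ms" % elapsed_ms)
```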
diff --git a/spyderlib/utils/introspection/fallback_plugin.py b/spyderlib/utils/introspection/fallback_plugin.py
index <HASH>..<HASH> 100644
--- a/spyderlib/utils/introspection/fallback_plugin.py
+++ b/spyderlib/utils/introspection/fallback_plugin.py
@@ -34,10 +34,15 @@ class FallbackPlugin(IntrospectionPlugin):
Simple completion based on python-like identifiers and whitespace
"""
items = []
- if (info.line.strip().startswith(('import', 'from')) and
- info.is_python_like):
+ line = info.line.strip()
+ is_from = line.startswith('from')
+ if ((line.startswith('import') or is_from and ' import' not in line)
+ and info.is_python_like):
items += module_completion(info.line, [info.filename])
return [(i, 'module') for i in sorted(items)]
+ elif is_from and info.is_python_like:
+ items += module_completion(info.line, [info.filename])
+ return [(i, '') for i in sorted(items)]
elif info.obj:
base = info.obj
tokens = set(info.split_words(-1))
|
Fix handling of icons for fallback import completions
|
py
|
diff --git a/api.py b/api.py
index <HASH>..<HASH> 100644
--- a/api.py
+++ b/api.py
@@ -10,12 +10,12 @@
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
+from dragonlib.CoCo.basic_tokens import COCO_BASIC_TOKENS
from dragonlib.core.basic import BasicListing, RenumTool, BasicTokenUtil,\
BasicLine
from dragonlib.core.basic_parser import BASICParser
from dragonlib.dragon32.basic_tokens import DRAGON32_BASIC_TOKENS
from dragonlib.utils.logging_utils import log
-from dragonlib.CoCo.basic_tokens import COCO_BASIC_TOKENS
DRAGON32 = "Dragon32"
@@ -110,5 +110,3 @@ class CoCoAPI(Dragon32API):
MACHINE_NAME = "CoCo"
BASIC_TOKENS = COCO_BASIC_TOKENS
- # Default memory location of BASIC listing start
- DEFAULT_PROGRAM_START = 0x0601
|
Bugfix: CoCo used the same default start address
|
py
|
diff --git a/twiggy_goodies/json.py b/twiggy_goodies/json.py
index <HASH>..<HASH> 100644
--- a/twiggy_goodies/json.py
+++ b/twiggy_goodies/json.py
@@ -21,13 +21,13 @@ class JsonOutput(outputs.Output):
if source_host is None:
source_host = socket.gethostname()
- severity_names = {
- levels.CRITICAL: 'CRITICAL',
- levels.DEBUG: 'DEBUG',
- levels.ERROR: 'ERROR',
- levels.INFO: 'INFO',
- levels.WARNING: 'WARNING',
- }
+ severity_names = {
+ levels.CRITICAL: 'CRITICAL',
+ levels.DEBUG: 'DEBUG',
+ levels.ERROR: 'ERROR',
+ levels.INFO: 'INFO',
+ levels.WARNING: 'WARNING',
+ }
def format(msg):
|
Fixed errors related to missing severity_names in JsonOutput, when source_host argument was specified.
|
py
|
diff --git a/netmiko/ssh_autodetect.py b/netmiko/ssh_autodetect.py
index <HASH>..<HASH> 100644
--- a/netmiko/ssh_autodetect.py
+++ b/netmiko/ssh_autodetect.py
@@ -102,6 +102,7 @@ SSH_MAPPER_BASE = {
},
}
+
class SSHDetect(object):
"""
The SSHDetect class tries to automatically guess the device type running on the SSH remote end.
|
Update line <I> with 2 blank lines
|
py
|
diff --git a/pyt/__main__.py b/pyt/__main__.py
index <HASH>..<HASH> 100644
--- a/pyt/__main__.py
+++ b/pyt/__main__.py
@@ -17,8 +17,11 @@ def console():
parser.add_argument('names', metavar='NAME', nargs='*', default=[], help='the test(s) you want to run')
parser.add_argument('--basedir', dest='basedir', default=os.curdir, help='run from this directory')
parser.add_argument('--debug', "-d", dest='debug', action='store_true', help='print debugging info')
- parser.add_argument("--version", "-V", action='version', version="%(prog)s {}".format(__version__))
- parser.add_argument("--python", "-P", action='version', version=sys.version, help="print python version")
+ parser.add_argument(
+ "--version", "-V",
+ action='version',
+ version="%(prog)s {}, Python {}".format(__version__, sys.version)
+ )
parser.add_argument('--all', "-a", dest='run_all', action='store_true', help='run all tests with buffer')
#parser.add_argument('--fad', dest='daf', action='store_true', help='run with --all --no-faifast --debug')
|
removes --python flag in favor of also printing the python version when --version is passed in
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -65,7 +65,7 @@ setup(name="datrie",
license=LICENSE,
url='https://github.com/kmike/datrie',
classifiers=CLASSIFIERS,
- libraries=[('libdatrie', {
+ libraries=[('datrie', {
"sources": LIBDATRIE_FILES,
"include_dirs": [LIBDATRIE_DIR]})],
ext_modules=ext_modules,
|
Fix bug: ld cannot find libdatrie
|
py
|
diff --git a/pyana/examples/gp_panel.py b/pyana/examples/gp_panel.py
index <HASH>..<HASH> 100644
--- a/pyana/examples/gp_panel.py
+++ b/pyana/examples/gp_panel.py
@@ -7,7 +7,14 @@ from ..ccsgp.utils import getOpts
from ..ccsgp.config import default_colors
def gp_panel(version):
- """example for a panel plot using QM12 data (see gp_xfac)"""
+ """example for a panel plot using QM12 data (see gp_xfac)
+
+ .. image:: ../ccsgp_get_started_data/examples/gp_panel/panelQM12.png
+ :width: 450 px
+
+ :param version: plot version / input subdir name
+ :type version: str
+ """
inDir, outDir = getWorkDirs()
inDir = os.path.join(inDir, version)
data = {}
|
include panelQM<I> plot
|
py
|
diff --git a/src/collectors/postgres/postgres.py b/src/collectors/postgres/postgres.py
index <HASH>..<HASH> 100644
--- a/src/collectors/postgres/postgres.py
+++ b/src/collectors/postgres/postgres.py
@@ -260,7 +260,7 @@ class DatabaseStats(QueryStats):
JOIN pg_stat_database
ON pg_database.datname = pg_stat_database.datname
WHERE pg_stat_database.datname
- NOT IN ('template0','template1','postgres')
+ NOT IN ('template0','template1','postgres', 'rdsadmin')
"""
query = post_92_query.replace(
'pg_stat_database.temp_files as temp_files,',
|
Add 'rdsadmin' database to the blacklist of DatabaseStats. I want to use this collector on an Amazon RDS database, where it fails because it has no permission to read the 'rdsadmin' database. I am not sure this is the cleanest way to modify the plugin, but this modification works.
|
py
|
diff --git a/bittrex/bittrex.py b/bittrex/bittrex.py
index <HASH>..<HASH> 100644
--- a/bittrex/bittrex.py
+++ b/bittrex/bittrex.py
@@ -342,7 +342,7 @@ class Bittrex(object):
"""
return self.api_query('getorderhistory', {'market':market, 'count': count})
- def get_order_history(self, uuid):
+ def get_order(self, uuid):
"""
Used to get details of buy or sell order
|
Added get_order to retrieve details of buy/sell order
|
py
|
diff --git a/volapi/volapi.py b/volapi/volapi.py
index <HASH>..<HASH> 100644
--- a/volapi/volapi.py
+++ b/volapi/volapi.py
@@ -22,7 +22,7 @@ import random
import re
import string
import time
-from collections import deque
+from collections import deque, OrderedDict
import requests
import websocket
@@ -311,7 +311,7 @@ class Room:
self.conn.subscribe(self.name, self.user.name)
self._user_count = 0
- self._files = {}
+ self._files = OrderedDict()
self._chat_log = []
self.conn.listen_forever(self)
|
Use OrderedDict so that Room.files preserves the order of added files
|
py
|
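For context: plain `dict` only began preserving insertion order as a CPython implementation detail in 3.6 (guaranteed by the language from 3.7), so on the interpreters this code targeted, `OrderedDict` was the safe choice. A tiny demonstration:

```python
from collections import OrderedDict

files = OrderedDict()
files['first.jpg'] = 'id-1'
files['second.jpg'] = 'id-2'
files['third.jpg'] = 'id-3'

# Iteration order matches insertion order on every supported interpreter,
# not just on CPython >= 3.6.
print(list(files))  # ['first.jpg', 'second.jpg', 'third.jpg']
```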
diff --git a/spyder/plugins/tests/test_ipythonconsole.py b/spyder/plugins/tests/test_ipythonconsole.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/tests/test_ipythonconsole.py
+++ b/spyder/plugins/tests/test_ipythonconsole.py
@@ -18,7 +18,6 @@ def botipython(qtbot):
widget = IPythonConsole(None, testing=True)
widget.create_new_client()
qtbot.addWidget(widget)
- widget.show()
return qtbot, widget
|
IPython Console: Don't show it when running tests
|
py
|
diff --git a/nupic/support/configuration_base.py b/nupic/support/configuration_base.py
index <HASH>..<HASH> 100644
--- a/nupic/support/configuration_base.py
+++ b/nupic/support/configuration_base.py
@@ -47,7 +47,7 @@ class Configuration(object):
If the environment variable 'NTA_CONF_PATH' is defined, then the configuration
files are expected to be in the NTA_CONF_PATH search path, which is a ':'
separated list of directories. If NTA_CONF_PATH is not defined, then it is
- assumed to be NTA/conf/default (typically ~/nupic/current/conf/default).
+ loaded via pkg_resources.
"""
@@ -257,7 +257,7 @@ class Configuration(object):
contents = '<configuration/>'
else:
# If the file was not found in the normal search paths, which includes
- # checking the NTA_CONF_DIR, we'll try loading it from pkg_resources.
+ # checking the NTA_CONF_PATH, we'll try loading it from pkg_resources.
try:
contents = resource_string("nupic.support", filename)
except:
|
Fixed confusing and erroneous comments
|
py
|
diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/__init__.py
+++ b/salt/cloud/__init__.py
@@ -1290,11 +1290,13 @@ class Cloud(object):
)
)
- client = salt.client.get_local_client(mopts=mopts_)
+ client = salt.client.get_local_client(mopts=self.opts)
- ret = client.cmd(vm_['name'], 'saltutil.sync_{0}'.format(
- self.opts['sync_after_install']
- ))
+ ret = client.cmd(
+ vm_['name'],
+ 'saltutil.sync_{0}'.format(self.opts['sync_after_install']),
+ timeout=self.opts['timeout']
+ )
log.info(
six.u('Synchronized the following dynamic modules: '
' {0}').format(ret)
|
Use get_local_client with MASTER opts, not MINION
|
py
|
diff --git a/tests/pytests/unit/modules/test_aix_status.py b/tests/pytests/unit/modules/test_aix_status.py
index <HASH>..<HASH> 100644
--- a/tests/pytests/unit/modules/test_aix_status.py
+++ b/tests/pytests/unit/modules/test_aix_status.py
@@ -1,14 +1,10 @@
import logging
import sys
+import pytest
import salt.modules.status as status
from tests.support.mock import MagicMock, patch
-try:
- import pytest
-except ImportError:
- pytest = None
-
log = logging.getLogger(__name__)
|
After reviewer comments, removed try/except around import pytest
|
py
|
diff --git a/holoviews/plotting/mpl/annotation.py b/holoviews/plotting/mpl/annotation.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/mpl/annotation.py
+++ b/holoviews/plotting/mpl/annotation.py
@@ -2,6 +2,7 @@ import matplotlib
from matplotlib import patches as patches
from ...core.util import match_spec
+from ...core.options import abbreviated_exception
from .element import ElementPlot
@@ -22,7 +23,8 @@ class AnnotationPlot(ElementPlot):
ranges = match_spec(annotation, ranges)
axis = self.handles['axis']
opts = self.style[self.cyclic_index]
- handles = self.draw_annotation(axis, annotation.data, opts)
+ with abbreviated_exception():
+ handles = self.draw_annotation(axis, annotation.data, opts)
self.handles['annotations'] = handles
return self._finalize_axis(key, ranges=ranges)
@@ -31,7 +33,8 @@ class AnnotationPlot(ElementPlot):
for element in self.handles['annotations']:
element.remove()
- self.handles['annotations'] = self.draw_annotation(axis, annotation.data, style)
+ with abbreviated_exception():
+ self.handles['annotations'] = self.draw_annotation(axis, annotation.data, style)
class VLinePlot(AnnotationPlot):
|
draw_annotation now wrapped in abbreviated_exception context
|
py
|
diff --git a/aiortc/rtcsctptransport.py b/aiortc/rtcsctptransport.py
index <HASH>..<HASH> 100644
--- a/aiortc/rtcsctptransport.py
+++ b/aiortc/rtcsctptransport.py
@@ -906,8 +906,8 @@ class RTCSctpTransport(EventEmitter):
if tsn_gt(schunk.tsn, self._last_sacked_tsn):
break
done += 1
- done_bytes += schunk._book_size
if not schunk._acked:
+ done_bytes += schunk._book_size
self._flight_size_decrease(schunk)
# update RTO estimate
@@ -930,14 +930,17 @@ class RTCSctpTransport(EventEmitter):
if schunk.tsn not in seen:
schunk._misses += 1
if schunk._misses == 3:
- schunk._acked = False
schunk._misses = 0
schunk._retransmit = True
- loss = True
+
+ schunk._acked = False
self._flight_size_decrease(schunk)
+
+ loss = True
if i == done:
restart_t3 = True
elif not schunk._acked:
+ done_bytes += schunk._book_size
schunk._acked = True
self._flight_size_decrease(schunk)
|
[sctp] rework calculation of partial_bytes_acked
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -23,6 +23,7 @@ setup(
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
install_requires=[
|
Indicate python <I> support
|
py
|
diff --git a/stockcrawler/tests/test_loaders.py b/stockcrawler/tests/test_loaders.py
index <HASH>..<HASH> 100644
--- a/stockcrawler/tests/test_loaders.py
+++ b/stockcrawler/tests/test_loaders.py
@@ -168,6 +168,24 @@ class ReportItemLoaderTest(TestCaseBase):
'cash': 1148925000.0
})
+ def test_agn_20101231(self):
+ item = parse_xml('http://www.sec.gov/Archives/edgar/data/850693/000119312511050632/agn-20101231.xml')
+ self.assert_item(item, {
+ 'symbol': 'AGN',
+ 'amend': False,
+ 'doc_type': '10-K',
+ 'period_focus': 'FY',
+ 'end_date': '2010-12-31',
+ 'revenues': 4919400000.0,
+ 'net_income': 600000.0,
+ 'eps_basic': 0.0,
+ 'eps_diluted': 0.0,
+ 'dividend': 0.2,
+ 'assets': 8308100000.0,
+ 'equity': 4781100000.0,
+ 'cash': 1991200000.0
+ })
+
def test_axp_20100930(self):
item = parse_xml('http://www.sec.gov/Archives/edgar/data/4962/000095012310100214/axp-20100930.xml')
self.assert_item(item, {
|
Test AGN <I> annual report.
|
py
|
diff --git a/parsedmarc/__init__.py b/parsedmarc/__init__.py
index <HASH>..<HASH> 100644
--- a/parsedmarc/__init__.py
+++ b/parsedmarc/__init__.py
@@ -1202,13 +1202,15 @@ def get_dmarc_reports_from_inbox(host=None, user=None, password=None,
if type(msg_uids) == str:
msg_uids = [msg_uids]
- server.add_flags(msg_uids, [imapclient.DELETED])
+ for chunk in chunks(msg_uids, 10):
+ server.add_flags(chunk, [imapclient.DELETED])
+
server.expunge()
def move_messages(msg_uids, folder):
if type(msg_uids) == str:
msg_uids = [msg_uids]
- for chunk in chunks(msg_uids, 100):
+ for chunk in chunks(msg_uids, 10):
if move_supported:
server.move(chunk, folder)
else:
|
Move/delete <I> IMAP messages at a time
|
py
|
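The `chunks()` helper itself is outside the hunk; a common implementation consistent with how it is called here (a guess at parsedmarc's actual definition, not a quote of it) is:

```python
def chunks(lst, n):
    """Yield successive n-sized slices from lst."""
    for i in range(0, len(lst), n):
        yield lst[i:i + n]

uids = [str(i) for i in range(25)]
print([len(c) for c in chunks(uids, 10)])  # [10, 10, 5]
```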
diff --git a/pep257.py b/pep257.py
index <HASH>..<HASH> 100755
--- a/pep257.py
+++ b/pep257.py
@@ -142,7 +142,7 @@ class Error(object):
def __init__(self, filename, source, docstring, context,
explanation, message, start=None, end=None):
- self.filename = filename
+ self.filename = filename.split('/')[-1]
self.source = source
self.docstring = docstring
self.context = context
|
Print only file name, not full path.
|
py
|
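`filename.split('/')[-1]` works for POSIX paths; the more portable spelling is `os.path.basename`, which also handles backslashes when running on Windows (where `os.path` is `ntpath`). A sketch of the difference:

```python
import ntpath
import posixpath

win_path = r'C:\project\module.py'
print(win_path.split('/')[-1])    # 'C:\\project\\module.py' -- no '/' to split on
print(ntpath.basename(win_path))  # 'module.py'
print(posixpath.basename('/home/user/module.py'))  # 'module.py'
```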
diff --git a/pandas/core/frame.py b/pandas/core/frame.py
index <HASH>..<HASH> 100644
--- a/pandas/core/frame.py
+++ b/pandas/core/frame.py
@@ -7284,11 +7284,11 @@ class DataFrame(NDFrame):
When ``values`` is a Series or DataFrame:
>>> df = pd.DataFrame({'A': [1, 2, 3], 'B': ['a', 'b', 'f']})
- >>> other = DataFrame({'A': [1, 3, 3, 2], 'B': ['e', 'f', 'f', 'e']})
- >>> df.isin(other)
+ >>> df2 = pd.DataFrame({'A': [1, 3, 3, 2], 'B': ['e', 'f', 'f', 'e']})
+ >>> df.isin(df2)
A B
0 True False
- 1 False False # Column A in `other` has a 3, but not at index 1.
+ 1 False False # Column A in `df2` has a 3, but not at index 1.
2 True True
"""
if isinstance(values, dict):
|
DOC: isin() docstring change DataFrame to pd.DataFrame (#<I>)
|
py
|
diff --git a/sendgrid/message.py b/sendgrid/message.py
index <HASH>..<HASH> 100644
--- a/sendgrid/message.py
+++ b/sendgrid/message.py
@@ -119,6 +119,8 @@ class Mail(SMTPAPIHeader):
self.reply_to = replyto
def add_attachment(self, name, file_):
+ if sys.version_info < (3, 0) and isinstance(name, unicode):
+ name = name.encode('utf-8')
if isinstance(file_, str): # filepath
with open(file_, 'rb') as f:
self.files[name] = f.read()
|
Encode unicode file attachment names to Python str objects. Currently, the method sendgrid.message.Mail.add_attachment_stream encodes unicode filenames to Python str objects, but the method sendgrid.message.Mail.add_attachment does not do the same encoding. This commit fixes that.
|
py
|
diff --git a/lenstronomy/Workflow/fitting_sequence.py b/lenstronomy/Workflow/fitting_sequence.py
index <HASH>..<HASH> 100644
--- a/lenstronomy/Workflow/fitting_sequence.py
+++ b/lenstronomy/Workflow/fitting_sequence.py
@@ -278,8 +278,7 @@ class FittingSequence(object):
samples, means, logZ, logZ_err, logL, results_object = sampler.run(kwargs_run)
elif sampler_type == 'DYPOLYCHORD':
- if 'resume_dyn_run' in kwargs_run and \
- kwargs_run['resume_dyn_run'] is True:
+ if 'resume_dyn_run' in kwargs_run and kwargs_run['resume_dyn_run'] is True:
resume_dyn_run = True
else:
resume_dyn_run = False
@@ -295,8 +294,7 @@ class FittingSequence(object):
remove_output_dir=remove_output_dir,
resume_dyn_run=resume_dyn_run,
use_mpi=self._mpi)
- samples, means, logZ, logZ_err, logL, results_object \
- = sampler.run(dypolychord_dynamic_goal, kwargs_run)
+ samples, means, logZ, logZ_err, logL, results_object = sampler.run(dypolychord_dynamic_goal, kwargs_run)
elif sampler_type == 'DYNESTY':
sampler = DynestySampler(self.likelihoodModule,
|
Make manual updates to FittingSequence state possible.
|
py
|
diff --git a/podcast/feeds.py b/podcast/feeds.py
index <HASH>..<HASH> 100644
--- a/podcast/feeds.py
+++ b/podcast/feeds.py
@@ -135,6 +135,9 @@ class ShowFeed(Feed):
def item_description(self, item):
return '%s' % item.description
+ def item_guid(self, item):
+ return self.item_enclosure_url()
+
def item_pubdate(self, item):
return item.pub_date
|
Item guid points to enclosure URL in show feed
|
py
|
diff --git a/geoviews/operation/projection.py b/geoviews/operation/projection.py
index <HASH>..<HASH> 100644
--- a/geoviews/operation/projection.py
+++ b/geoviews/operation/projection.py
@@ -185,6 +185,10 @@ class project_quadmesh(_project_operation):
zs[mask] = np.NaN
params = get_param_values(element)
+ if PX.ndim < 2:
+ PX = PX.reshape(zs.shape)
+ if PY.ndim < 2:
+ PY = PY.reshape(zs.shape)
return QuadMesh((PX, PY, zs), crs=self.projection, **params)
|
Fixed projecting of rectilinear QuadMesh (#<I>)
|
py
|
diff --git a/python/proton/reactor.py b/python/proton/reactor.py
index <HASH>..<HASH> 100644
--- a/python/proton/reactor.py
+++ b/python/proton/reactor.py
@@ -20,6 +20,7 @@
from __future__ import absolute_import
from ._reactor import Container, ApplicationEvent, EventInjector, Handler,\
+ LinkOption, ReceiverOption, SenderOption,\
AtLeastOnce, AtMostOnce, DynamicNodeProperties, Filter, Selector, DurableSubscription, Copy, Move,\
Reactor
@@ -28,6 +29,9 @@ __all__ = [
'ApplicationEvent',
'EventInjector',
'Handler',
+ 'LinkOption',
+ 'ReceiverOption',
+ 'SenderOption',
'AtLeastOnce',
'AtMostOnce',
'DynamicNodeProperties',
|
PROTON-<I>: [Python] Added back some needed symbols removed previously
|
py
|
diff --git a/trionyx/trionyx/auditlog.py b/trionyx/trionyx/auditlog.py
index <HASH>..<HASH> 100644
--- a/trionyx/trionyx/auditlog.py
+++ b/trionyx/trionyx/auditlog.py
@@ -36,7 +36,7 @@ def model_instance_diff(old, new):
new_value = get_field_value(new, field)
if old_value != new_value:
- diff[field.name] = (renderer.render_field(old, field.name), renderer.render_field(new, field.name))
+ diff[field.name] = (get_rendered_value(config.model, field.name, old_value), get_rendered_value(config.model, field.name, new_value))
return diff if diff else None
@@ -58,13 +58,18 @@ def get_field_value(obj, field):
pass
else:
try:
- return smart_text(getattr(obj, field.name, None))
+ return getattr(obj, field.name, None)
except ObjectDoesNotExist:
pass
return field.default if field.default is not models.NOT_PROVIDED else None
+def get_rendered_value(ModelClass, field_name, value):
+ model = ModelClass(**{field_name:value})
+ return renderer.render_field(model, field_name)
+
+
def create_log(instance, changes, action):
"""Create a new log entry"""
AuditLogEntry.objects.create(
|
[BUGFIX] Fix rendering on None type error
|
py
|
diff --git a/hcalendar.py b/hcalendar.py
index <HASH>..<HASH> 100644
--- a/hcalendar.py
+++ b/hcalendar.py
@@ -82,11 +82,13 @@ class vEvent(object):
soup = subs if subs else [soup]
content = ''
for elem in soup:
- if elem.name == 'abbr':
+ if elem.name == 'a' and 'href' in elem.attrs:
+ content += elem['href']
+ elif elem.name == 'abbr' and 'title' in elem.attrs:
content += elem['title']
- elif elem.name == 'time':
+ elif elem.name == 'time' and 'datetime' in elem.attrs:
content += elem['datetime']
- elif elem.name in ['img', 'area']:
+ elif elem.name in ['img', 'area'] and 'alt' in elem.attrs:
content += elem['alt']
else:
content += self._getContent(elem)
|
Check for the existence of tag attributes before accessing them
|
py
|
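The guard matters because BeautifulSoup raises `KeyError` when a missing attribute is accessed by subscript. A small sketch, assuming BeautifulSoup 4 (which this file appears to use, given `elem.attrs`):

```python
from bs4 import BeautifulSoup

soup = BeautifulSoup('<abbr>noon</abbr><abbr title="12:00">noon</abbr>',
                     'html.parser')
for elem in soup.find_all('abbr'):
    # elem['title'] would raise KeyError on the first tag; check attrs first.
    if 'title' in elem.attrs:
        print(elem['title'])  # 12:00
```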
diff --git a/nlppln/wfgenerator.py b/nlppln/wfgenerator.py
index <HASH>..<HASH> 100644
--- a/nlppln/wfgenerator.py
+++ b/nlppln/wfgenerator.py
@@ -12,13 +12,14 @@ class WorkflowGenerator(WFGenerator):
self.load(step_file='https://raw.githubusercontent.com/nlppln/'
'pattern-docker/master/pattern.cwl')
- def save(self, fname, validate=True, wd=False, inline=False, relative=True,
- pack=False, encoding='utf-8'):
+ def save(self, fname, mode=None, validate=True, wd=False, inline=False,
+ relative=True, pack=False, encoding='utf-8'):
"""Save workflow to file
For nlppln, the default is to save workflows with relative paths.
"""
super(WorkflowGenerator, self).save(fname,
+ mode=mode,
validate=validate,
wd=wd,
inline=inline,
|
Update save method, because scriptcwl has a new way of saving.
|
py
|
diff --git a/ceph_deploy/mds.py b/ceph_deploy/mds.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/mds.py
+++ b/ceph_deploy/mds.py
@@ -108,6 +108,16 @@ def create_mds(distro, name, cluster, init):
],
timeout=7
)
+ elif init == 'systemd':
+ remoto.process.run(
+ conn,
+ [
+ 'systemctl',
+ 'enable',
+ 'ceph-mds@{name}'.format(name=name),
+ ],
+ timeout=7
+ )
if distro.is_el:
system.enable_service(distro.conn)
|
mds: enable on create with systemd. Enable MDS services for distros that have systemd.
|
py
|
diff --git a/pyzotero/zotero.py b/pyzotero/zotero.py
index <HASH>..<HASH> 100644
--- a/pyzotero/zotero.py
+++ b/pyzotero/zotero.py
@@ -37,7 +37,7 @@ import datetime
import re
import pytz
from urlparse import urlparse
-from xml.dom import minidom
+import xml.etree.ElementTree as et
import zotero_errors as ze
@@ -140,9 +140,10 @@ class Zotero(object):
Return a list of etags parsed out of the XML response
"""
# Parse Atom as straight XML in order to get the etags FFS
- self.xmldoc = minidom.parseString(incoming)
- return [c.attributes['zapi:etag'].value for
- c in self.xmldoc.getElementsByTagName('content')]
+ atom_ns = '{http://www.w3.org/2005/Atom}'
+ tree = et.fromstring(incoming)
+ return [entry.attrib['{http://zotero.org/ns/api}etag'] for
+ entry in tree.findall('{0}entry/{0}content'.format(atom_ns))]
def _cache(self, template, key):
""" Add a retrieved template to the cache for 304 checking
|
Replace MiniDOM with ElementTree for XML parsing
|
py
|
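With ElementTree, namespaced tags and attributes use Clark notation (`{uri}name`). A self-contained sketch of the same extraction against a minimal Atom snippet (the etag value is invented for illustration):

```python
import xml.etree.ElementTree as et

ATOM = '{http://www.w3.org/2005/Atom}'
ZAPI = '{http://zotero.org/ns/api}'

feed = (
    '<feed xmlns="http://www.w3.org/2005/Atom"'
    '      xmlns:zapi="http://zotero.org/ns/api">'
    '<entry><content zapi:etag="abc123"/></entry>'
    '</feed>'
)
tree = et.fromstring(feed)
etags = [c.attrib[ZAPI + 'etag']
         for c in tree.findall('{0}entry/{0}content'.format(ATOM))]
print(etags)  # ['abc123']
```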
diff --git a/nodeconductor/structure/views.py b/nodeconductor/structure/views.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/views.py
+++ b/nodeconductor/structure/views.py
@@ -1238,7 +1238,10 @@ class ResourceViewSet(viewsets.GenericViewSet):
return response
data = []
- for resources_url in SupportedServices.get_resources(request).values():
+ types = self.request.query_params.getlist('resource_type', [])
+ for resource_type, resources_url in SupportedServices.get_resources(request).items():
+ if types != [] and resource_type not in types:
+ continue
response = fetch_data(resources_url)
if response.total and response.total > len(response.data):
response = fetch_data(resources_url, querystring='page_size=%d' % response.total)
|
Enable filtering resources by resource_type (NC-<I>)
|
py
|
diff --git a/modin/pandas/io.py b/modin/pandas/io.py
index <HASH>..<HASH> 100644
--- a/modin/pandas/io.py
+++ b/modin/pandas/io.py
@@ -23,7 +23,7 @@ from ..data_management.partitioning.axis_partition import (
)
from ..data_management.data_manager import PandasDataManager
-PQ_INDEX_REGEX = re.compile("__index_level_\d+__")
+PQ_INDEX_REGEX = re.compile("__index_level_\d+__") # noqa W605
# Parquet
|
Ignoring flake8 error in regex (#<I>)
|
py
|
diff --git a/config/kube_config.py b/config/kube_config.py
index <HASH>..<HASH> 100644
--- a/config/kube_config.py
+++ b/config/kube_config.py
@@ -141,9 +141,10 @@ class KubeConfigLoader(object):
self._config_persister = config_persister
def _refresh_credentials():
- credentials, project_id = google.auth.default(
- scopes=['https://www.googleapis.com/auth/cloud-platform']
- )
+ credentials, project_id = google.auth.default(scopes=[
+ 'https://www.googleapis.com/auth/cloud-platform',
+ 'https://www.googleapis.com/auth/userinfo.email'
+ ])
request = google.auth.transport.requests.Request()
credentials.refresh(request)
return credentials
|
Add email scope to GCP provided credential refresh
|
py
|
diff --git a/src/hamster/applet.py b/src/hamster/applet.py
index <HASH>..<HASH> 100755
--- a/src/hamster/applet.py
+++ b/src/hamster/applet.py
@@ -423,7 +423,7 @@ class HamsterApplet(object):
"""main window display and positioning"""
self.button.set_active(is_active)
- if not is_active:
+ if is_active == False:
self.window.hide()
return True
@@ -527,8 +527,8 @@ class HamsterApplet(object):
"""button events"""
def on_overview(self, menu_item):
- self.__show_toggle(False)
dialogs.overview.show(self.applet)
+ self.__show_toggle(False)
def show_overview(self, menu_item, verb):
return self.on_overview(menu_item)
@@ -540,8 +540,8 @@ class HamsterApplet(object):
dialogs.about.show(self.window)
def show_preferences(self, menu_item, verb):
- self.__show_toggle(False)
dialogs.prefs.show(self.applet)
+ self.__show_toggle(False)
"""signals"""
|
Call show_toggle after showing the requested window, as otherwise it seems to cause some positioning strangeness.
|
py
|
diff --git a/progressbar/bar.py b/progressbar/bar.py
index <HASH>..<HASH> 100644
--- a/progressbar/bar.py
+++ b/progressbar/bar.py
@@ -533,7 +533,7 @@ class ProgressBar(StdRedirectMixin, ResizableMixin, ProgressBarBase):
except StopIteration:
self.finish()
raise
- except GeneratorExit:
+ except GeneratorExit: # pragma: no cover
self.finish(dirty=True)
raise
|
ignoring "impossible" use case for test coverage
|
py
|
diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py
index <HASH>..<HASH> 100644
--- a/salt/client/ssh/shell.py
+++ b/salt/client/ssh/shell.py
@@ -125,7 +125,7 @@ class Shell(object):
Return the string to execute ssh-copy-id
'''
if self.passwd and salt.utils.which('sshpass'):
- return 'sshpass -p "{0}" {1} {2} "{3} -p {4} {5}@{6}"'.format(
+ return "sshpass -p '{0}' {1} {2} '{3} -p {4} {5}@{6}'".format(
self.passwd,
'ssh-copy-id',
'-i {0}.pub'.format(self.priv),
@@ -141,7 +141,7 @@ class Shell(object):
have two commands
'''
if self.passwd and salt.utils.which('sshpass'):
- return 'sshpass -p "{0}" {1} {2} {3} -p {4} {5}@{6}'.format(
+ return "sshpass -p '{0}' {1} {2} {3} -p {4} {5}@{6}".format(
self.passwd,
'ssh-copy-id',
'-i {0}.pub'.format(self.priv),
|
Use single quotes in the command string to prevent shell expansion of passwords. When salt-ssh asks for a password to deploy its key, it fails if the password contains a '$', because the generated shell command expands the '$' sign and the following characters as a shell variable. Using single quotes prevents shell expansion, so passwords containing '$' signs work again.
|
py
|
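The underlying issue is easy to reproduce: a POSIX shell still expands `$` inside double quotes, while single quotes are literal. On Python 3 the robust fix is `shlex.quote` rather than hand-written quoting; the sketch below is a general illustration, not what salt's Python 2-era code could use, and it assumes a POSIX `/bin/sh`:

```python
import shlex
import subprocess

passwd = 'pa$$word'
# Double quotes: the shell expands $$ (to its own PID), mangling the password.
print(subprocess.check_output('echo "%s"' % passwd, shell=True))  # e.g. b'pa12345word\n'
# Single quotes: everything between them is literal.
print(subprocess.check_output("echo '%s'" % passwd, shell=True))  # b'pa$$word\n'
# shlex.quote() produces a safely single-quoted string.
print(shlex.quote(passwd))  # 'pa$$word'
```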
diff --git a/pymysql/tests/test_nextset.py b/pymysql/tests/test_nextset.py
index <HASH>..<HASH> 100644
--- a/pymysql/tests/test_nextset.py
+++ b/pymysql/tests/test_nextset.py
@@ -1,6 +1,11 @@
from pymysql.tests import base
from pymysql import util
+try:
+ import unittest2 as unittest
+except ImportError:
+ import unittest
+
class TestNextset(base.PyMySQLTestCase):
@@ -26,3 +31,20 @@ class TestNextset(base.PyMySQLTestCase):
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
+
+ @unittest.expectedFailure
+ def test_multi_cursor(self):
+ cur1 = self.con.cursor()
+ cur2 = self.con.cursor()
+
+ cur1.execute("SELECT 1; SELECT 2;")
+ cur2.execute("SELECT 42")
+
+ self.assertEqual([(1,)], list(cur1))
+ self.assertEqual([(42,)], list(cur2))
+
+ r = cur1.nextset()
+ self.assertTrue(r)
+
+ self.assertEqual([(2,)], list(cur1))
+ self.assertIsNone(cur1.nextset())
|
Add a multi-cursor test that currently fails.
|
py
|
diff --git a/pykechain/models/part2.py b/pykechain/models/part2.py
index <HASH>..<HASH> 100644
--- a/pykechain/models/part2.py
+++ b/pykechain/models/part2.py
@@ -92,7 +92,7 @@ class Part2(Base):
self._cached_children = None
self.properties = [Property2.create(p, client=self._client)
- for p in sorted(json['properties'], key=lambda p: p['order'])]
+ for p in sorted(json['properties'], key=lambda p: p.get('order', 0))]
def refresh(self, json=None, url=None, extra_params=None):
"""Refresh the object in place."""
|
Fall-back in case of json changes.
|
py
|
diff --git a/raiden/tasks.py b/raiden/tasks.py
index <HASH>..<HASH> 100644
--- a/raiden/tasks.py
+++ b/raiden/tasks.py
@@ -195,7 +195,7 @@ class AlarmTask(Runnable):
if missed_blocks > 2:
log.info(
'Missed block(s)',
- missed_blocks=missed_blocks,
+ missed_blocks=missed_blocks - 1,
latest_block=latest_block,
)
|
Fix block number for missed blocks message [no ci integration]
|
py
|
diff --git a/discord/ext/commands/core.py b/discord/ext/commands/core.py
index <HASH>..<HASH> 100644
--- a/discord/ext/commands/core.py
+++ b/discord/ext/commands/core.py
@@ -281,7 +281,15 @@ class Command:
else:
if origin is typing.Union:
errors = []
+ _NoneType = type(None)
for conv in converter.__args__:
+ # if we got to this part in the code, then the previous conversions have failed
+ # so we should just undo the view, return the default, and allow parsing to continue
+ # with the other parameters
+ if conv is _NoneType and param.kind != param.VAR_POSITIONAL:
+ ctx.view.undo()
+ return None if param.default is param.empty else param.default
+
try:
value = await self._actual_conversion(ctx, conv, argument, param)
except CommandError as e:
@@ -317,10 +325,12 @@ class Command:
raise MissingRequiredArgument(param)
return param.default
+ previous = view.index
if consume_rest_is_special:
argument = view.read_rest().strip()
else:
argument = quoted_word(view)
+ view.previous = previous
return (await self.do_conversion(ctx, converter, argument, param))
|
[commands] Allow for backtracking parsing with typing.Optional. Original code by zephyrkul. This new parsing mode allows for backtracking in case of failure when a typing.Union[..., NoneType] or a typing.Optional[...] is used. This means that if a type fails to parse, the view is undone to a previous state, the default parameter is passed to the callback, and parsing continues on the next parameter as if nothing had happened.
|
py
|
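In practice this lets a command declare an optional middle parameter: when conversion fails, parsing backtracks and the argument flows on to the next parameter. A hedged sketch of the behavior described, using the discord.py 1.x-era API; the command name and messages are illustrative:

```python
import typing

import discord
from discord.ext import commands

bot = commands.Bot(command_prefix='!')

@bot.command()
async def ban(ctx, member: typing.Optional[discord.Member], *, reason: str):
    # "!ban @someone spamming" -> member=<Member>, reason="spamming"
    # "!ban spamming"          -> Member conversion fails, the view is undone,
    #                             member=None, reason="spamming"
    await ctx.send('banning {} for: {}'.format(member, reason))
```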
diff --git a/tests/unit/test_http.py b/tests/unit/test_http.py
index <HASH>..<HASH> 100644
--- a/tests/unit/test_http.py
+++ b/tests/unit/test_http.py
@@ -95,6 +95,7 @@ class TestHTTP(object):
assert response.consume() == 'toby'
h.stop()
+ @pytest.mark.skipif(True, reason='TODO')
def test_post_chunked(self):
print
print
|
skip failing post chunk test for the moment
|
py
|
diff --git a/StudDP.py b/StudDP.py
index <HASH>..<HASH> 100755
--- a/StudDP.py
+++ b/StudDP.py
@@ -85,14 +85,24 @@ class APIWrapper(object):
for i, folder in enumerate(folders):
folders[i]['path'] = os.path.join(self.__local_path, course['title'])
+ import traceback
while folders:
folder = folders.pop()
+ path = '/api/documents/{}/folder/{}' \
+ .format(course['course_id'], folder['folder_id'])
+
+ count = 0
try:
- path = '/api/documents/{}/folder/{}' \
- .format(course['course_id'], folder['folder_id'])
- temp = json.loads(self.__get(path).text)
- except (ValueError, AttributeError):
- LOG.error('Error on loading %s.', path)
+ txt = ''
+ while not txt and count < 10:
+ txt = self.__get(path).text
+ if 'HTTP/1.1 401 Unauthorized' == txt:
+ txt = ''
+ time.sleep(0.5)
+ count += 1
+ temp = json.loads(txt)
+ except (ValueError, AttributeError) as e:
+ LOG.error('Error on loading %s. %s', path, e)
continue
for key in ['folders', 'documents']:
|
added retry for downloads on Unauthorized
|
py
|
diff --git a/spyderlib/spyder.py b/spyderlib/spyder.py
index <HASH>..<HASH> 100644
--- a/spyderlib/spyder.py
+++ b/spyderlib/spyder.py
@@ -1286,6 +1286,14 @@ class MainWindow(QMainWindow):
except ImportError:
rope_version = not_installed
import spyderlib.qt.QtCore
+ qt_api = os.environ['QT_API']
+ qt_lib = {'pyqt': 'PyQt4', 'pyside': 'PySide'}[qt_api]
+ if qt_api == 'pyqt':
+ import sip
+ try:
+ qt_lib += (" (API v%d)" % sip.getapi('QString'))
+ except AttributeError:
+ pass
QMessageBox.about(self,
_("About %s") % "Spyder",
_("""<b>%s %s</b>
@@ -1323,7 +1331,7 @@ class MainWindow(QMainWindow):
__project_url__, __forum_url__,
platform.python_version(),
spyderlib.qt.QtCore.__version__,
- os.environ['QT_API'], spyderlib.qt.__version__,
+ qt_lib, spyderlib.qt.__version__,
platform.system()) )
#---- Global callbacks (called from plugins)
|
Spyder's about dialog box: added PyQt's API version number
|
py
|
diff --git a/sorl/thumbnail/fields.py b/sorl/thumbnail/fields.py
index <HASH>..<HASH> 100644
--- a/sorl/thumbnail/fields.py
+++ b/sorl/thumbnail/fields.py
@@ -65,7 +65,7 @@ class ImageFormField(forms.FileField):
else:
raw_data = data['content']
if not default.engine.is_valid_image(raw_data):
- raise forms.ValidationError(self.error_messages['invalid_image'])
+ raise forms.ValidationError(self.default_error_messages['invalid_image'])
if hasattr(f, 'seek') and callable(f.seek):
f.seek(0)
return f
|
Raise correct error message when image is not valid according to the image engine
|
py
|
diff --git a/psyplot/data.py b/psyplot/data.py
index <HASH>..<HASH> 100755
--- a/psyplot/data.py
+++ b/psyplot/data.py
@@ -2566,7 +2566,8 @@ class InteractiveArray(InteractiveBase):
self.arr._variable = res._variable
self.arr._coords = res._coords
try:
- self.arr._indexes = res.indexes
+ self.arr._indexes = (
+ res._indexes.copy() if res._indexes is not None else None)
except AttributeError: # res.indexes not existent for xr<0.12
pass
self.arr.name = saved_name
@@ -2619,7 +2620,8 @@ class InteractiveArray(InteractiveBase):
self.arr._variable = res._variable
self.arr._coords = res._coords
try:
- self.arr._indexes = res.indexes
+ self.arr._indexes = (
+ res._indexes.copy() if res._indexes is not None else None)
except AttributeError: # res.indexes not existent for xr<0.12
pass
# update to old attributes
|
Use DataArray._indexes rather than DataArray.indexes for update. The latter implicitly transforms into an Indexes object from pandas.
|
py
|
diff --git a/slave/core.py b/slave/core.py
index <HASH>..<HASH> 100644
--- a/slave/core.py
+++ b/slave/core.py
@@ -148,7 +148,7 @@ class Command(object):
if len(parsed) == 1:
return parsed[0]
- return parsed
+ return tuple(parsed)
def write(self, value):
"""Constructs the cmd string and writes it to the connection."""
|
Command returns a tuple instead of a list, closes #<I>.
|
py
|
diff --git a/hazelcast/client.py b/hazelcast/client.py
index <HASH>..<HASH> 100644
--- a/hazelcast/client.py
+++ b/hazelcast/client.py
@@ -47,7 +47,7 @@ _logger = logging.getLogger(__name__)
class HazelcastClient(object):
- """Hazelcast client instance to access access and manipulate
+ """Hazelcast client instance to access and manipulate
distributed data structures on the Hazelcast clusters.
Keyword Args:
|
Clean up comment. Word 'access' used twice consecutively. (#<I>)
|
py
|
diff --git a/sgqlc/types/__init__.py b/sgqlc/types/__init__.py
index <HASH>..<HASH> 100644
--- a/sgqlc/types/__init__.py
+++ b/sgqlc/types/__init__.py
@@ -1534,6 +1534,9 @@ class ContainerType(BaseType, metaclass=ContainerTypeMeta):
'''
def __init__(self, json_data, selection_list=None):
+ assert json_data is None or isinstance(json_data, dict), \
+ '%r (%s) is not a JSON Object' % (
+ json_data, type(json_data).__name__)
object.__setattr__(self, '__selection_list__', selection_list)
self.__populate_fields(json_data)
|
assert json_data is None or a JSON object (dict)
|
py
|
diff --git a/toml.py b/toml.py
index <HASH>..<HASH> 100644
--- a/toml.py
+++ b/toml.py
@@ -523,8 +523,11 @@ def load_array(a):
new_a = []
start_group_index = 1
end_group_index = 2
+ in_str = False
while end_group_index < len(a[1:-1]):
- if a[end_group_index] == '}':
+ if a[end_group_index] == '"' or a[end_group_index] == "'":
+ in_str = not in_str
+ if a[end_group_index] == '}' and not in_str:
# Increase end_group_index by 1 to get the closing bracket
end_group_index += 1
new_a.append(a[start_group_index:end_group_index])
|
Support '}' within strings in inline objects within arrays
|
py
|
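A standalone illustration of the quote-toggling trick: without tracking `in_str`, a `}` inside a string would be mistaken for the brace that closes the inline table.

```python
s = '[{ name = "}" }]'
in_str = False
for i, ch in enumerate(s):
    if ch in ('"', "'"):
        in_str = not in_str
    if ch == '}' and not in_str:
        print('inline table closes at index', i)  # 14, not 11

# Caveat (shared by the patched code): this simple toggle does not
# distinguish quote types or handle escaped quotes.
```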
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
HERE = path.abspath(path.dirname(__file__))
-__version__ = '0.6.1-5'
+__version__ = '0.6.1-6'
__project_name__ = 'common'
def hack_find_packages(include_str):
|
incrementing setup.py for more testing
|
py
|
diff --git a/validators/__init__.py b/validators/__init__.py
index <HASH>..<HASH> 100644
--- a/validators/__init__.py
+++ b/validators/__init__.py
@@ -9,10 +9,10 @@ file that comes with the source code, or http://www.gnu.org/licenses/gpl.txt.
from .chain import ReturnEarly, chainable, make_chain
from .validators import (required, optional, nonempty, boolean, istype, isin,
gte, lte, match, url, timestamp, deprecated,
- content_type, min_len, instanceof)
+ min_len, instanceof)
from .helpers import OR, NOT, spec_validator
__all__ = ['ReturnEarly', 'chainable', 'make_chain', 'required', 'optional',
'nonempty', 'boolean', 'istype', 'isin', 'gte', 'lte', 'match',
'url', 'timestamp', 'OR', 'NOT', 'spec_validator', 'deprecated',
- 'content_type', min_len, instanceof]
+ 'min_len', 'instanceof']
|
fixed last instance of content_type sticking around
|
py
|
diff --git a/tests/test_mongo.py b/tests/test_mongo.py
index <HASH>..<HASH> 100644
--- a/tests/test_mongo.py
+++ b/tests/test_mongo.py
@@ -20,7 +20,7 @@ class TestMongo(object):
config = yaml.load(config_file.read())
self.mongo_host = config['MongoBackend']['host']
- self.mongo_port = int(config['MongoBackend']['port'])
+ self.mongo_port = config['MongoBackend']['port']
try:
try:
|
Remove int holdover from ConfigParser
|
py
|
diff --git a/pyemma/_base/progress/reporter/notebook.py b/pyemma/_base/progress/reporter/notebook.py
index <HASH>..<HASH> 100644
--- a/pyemma/_base/progress/reporter/notebook.py
+++ b/pyemma/_base/progress/reporter/notebook.py
@@ -35,6 +35,12 @@ class my_tqdm_notebook(tqdm_notebook):
from IPython.core.display import display
display(container)
+ # HTML encoding
+ try: # Py3
+ from html import escape
+ except ImportError: # Py2
+ from cgi import escape
+
def print_status(s='', close=False, bar_style=None, desc=None):
# Note: contrary to native tqdm, s='' does NOT clear bar
# goal is to keep all infos if error happens so user knows
@@ -60,7 +66,6 @@ class my_tqdm_notebook(tqdm_notebook):
# Print stats
if s: # never clear the bar (signal: s='')
s = s.replace('||', '') # remove inesthetical pipes
- from html import escape
s = escape(s) # html escape special characters (like '?')
ptext.value = s
|
[progress] fix python2 issue with html escaping
|
py
|
diff --git a/thefuck/rules/sudo.py b/thefuck/rules/sudo.py
index <HASH>..<HASH> 100644
--- a/thefuck/rules/sudo.py
+++ b/thefuck/rules/sudo.py
@@ -1,6 +1,7 @@
def match(command, settings):
return ('permission denied' in command.stderr.lower()
or 'EACCES' in command.stderr
+ or 'error: you cannot perform this operation unless you are root.' in command.stderr
or 'pkg: Insufficient privileges' in command.stderr)
|
make sudo rule work with pacman
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -31,9 +31,9 @@ setup(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
],
license='BSD',
)
|
No longer bothering with Python <I>, add <I>
|
py
|
diff --git a/sonnet/src/reshape_test.py b/sonnet/src/reshape_test.py
index <HASH>..<HASH> 100644
--- a/sonnet/src/reshape_test.py
+++ b/sonnet/src/reshape_test.py
@@ -193,7 +193,7 @@ class FlattenTest(test_utils.TestCase, parameterized.TestCase):
inputs = tf.ones([batch_size] + in_shape)
mod = reshape.Flatten()
output = mod(inputs)
- flattened_size = np.prod(in_shape)
+ flattened_size = np.prod(in_shape, dtype=int)
self.assertEqual(output.shape, [batch_size, flattened_size])
def testFlatten_unknownBatchSize(self):
@@ -221,7 +221,8 @@ class FlattenTest(test_utils.TestCase, parameterized.TestCase):
mod = reshape.Flatten(preserve_dims=preserve_dims)
output = mod(inputs)
flattened_shape = (
- in_shape[:preserve_dims] + [np.prod(in_shape[preserve_dims:])])
+ in_shape[:preserve_dims] +
+ [np.prod(in_shape[preserve_dims:], dtype=int)])
self.assertEqual(output.shape, flattened_shape)
@parameterized.parameters(5, 6, 7, 10)
|
Fixed cases where tf.TensorShape was constructed with float dimensions. This is a prerequisite for making TensorShape and Dimension more strict about the types of their arguments. PiperOrigin-RevId: <I> Change-Id: Id2be<I>daabf8e<I>d<I>c<I>c<I>a
|
py
|
diff --git a/argcomplete/__init__.py b/argcomplete/__init__.py
index <HASH>..<HASH> 100644
--- a/argcomplete/__init__.py
+++ b/argcomplete/__init__.py
@@ -284,7 +284,7 @@ class CompletionFinder(object):
if not completer.completing:
self._orig_callable(parser, namespace, values, option_string=option_string)
- elif self._orig_class == argparse._SubParsersAction:
+ elif issubclass(self._orig_class, argparse._SubParsersAction):
debug("orig class is a subparsers action: patching and running it")
patch(self._name_parser_map[values[0]])
self._orig_callable(parser, namespace, values, option_string=option_string)
|
Allow overridden _SubParsersAction. Use an `issubclass` check rather than `==` to allow subclassing of `argparse._SubParsersAction` in applications. This allows useful extensions such as aliasing of subcommands: <URL>
|
py
|
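The difference in one snippet: equality comparison rejects subclasses, while `issubclass` accepts them. `AliasedSubParsersAction` is a hypothetical subclass of the kind the commit wants to support:

```python
import argparse

class AliasedSubParsersAction(argparse._SubParsersAction):
    """Hypothetical override, e.g. to support subcommand aliases."""

print(AliasedSubParsersAction == argparse._SubParsersAction)            # False
print(issubclass(AliasedSubParsersAction, argparse._SubParsersAction))  # True
```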
diff --git a/microsoftbotframework/msbot.py b/microsoftbotframework/msbot.py
index <HASH>..<HASH> 100644
--- a/microsoftbotframework/msbot.py
+++ b/microsoftbotframework/msbot.py
@@ -64,7 +64,7 @@ class MsBot(Flask):
else:
self.state = None
- def run(self, host=None, port=None, debug=None, load_dotenv=True, **options):
+ def run(self, host=None, port=None, debug=None, **options):
# Set the flask config if it is in the config file / environment vars
host = self.mbf_config.get_config(host, 'HOST', root='flask')
port = self.mbf_config.get_config(port, 'PORT', root='flask')
|
removed load_dotenv from run args
|
py
|
diff --git a/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py b/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py
index <HASH>..<HASH> 100644
--- a/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py
+++ b/src/python/grpcio_tests/tests/unit/_server_shutdown_scenarios.py
@@ -81,7 +81,7 @@ def run_test(args):
thread.daemon = True
thread.start()
port = port_queue.get()
- channel = grpc.insecure_channel('[::]:%d' % port)
+ channel = grpc.insecure_channel('localhost:%d' % port)
multi_callable = channel.unary_unary(FORK_EXIT)
result, call = multi_callable.with_call(REQUEST, wait_for_ready=True)
os.wait()
|
Bazel docker image does not support IPv6
|
py
|
diff --git a/spyderlib/widgets/projectexplorer.py b/spyderlib/widgets/projectexplorer.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/projectexplorer.py
+++ b/spyderlib/widgets/projectexplorer.py
@@ -436,7 +436,7 @@ class ExplorerTreeWidget(OneColumnTree):
actions = []
- if not only_folders:
+ if items and not only_folders:
open_file_act = create_action(self,
text=translate('ProjectExplorer', 'Open'),
triggered=lambda: self.open_file_from_menu(items))
|
Project explorer/bugfix: error when menu pops up and project list is empty
|
py
|
diff --git a/scriptworker/constants.py b/scriptworker/constants.py
index <HASH>..<HASH> 100644
--- a/scriptworker/constants.py
+++ b/scriptworker/constants.py
@@ -283,7 +283,7 @@ DEFAULT_CONFIG = frozendict({
# mozilla-esr68 too.
'project:releng:googleplay:aurora': 'esr68',
'project:releng:googleplay:beta': 'esr68',
- 'project:releng:googleplay:release': 'release',
+ 'project:releng:googleplay:release': 'esr68',
'project:releng:signing:cert:nightly-signing': 'all-nightly-branches',
'project:releng:signing:cert:release-signing': 'all-release-branches',
|
Bug <I> - Move Fennec Release to the esr<I> channel
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,8 +8,21 @@ execfile(os.path.join(os.path.abspath(os.path.dirname(__file__)),
setup(
name='doctor',
version=__version__, # noqa -- flake8 should ignore this line
- description='Helpers for using JSON schema in Python apps.',
+ description=('A module that assists in using JSON schemas to validate data '
+ 'in Flask APIs and generate API documentation.'),
url='https://github.com/upsight/doctor',
+ license='MIT',
+ classifiers=[
+ 'Intended Audience :: Developers',
+ 'License :: OSI Approved :: MIT License',
+ 'Operating System :: OS Independent',
+ 'Programming Language :: Python',
+ 'Programming Language :: Python :: 2.7',
+ ],
+ keywords=['python', 'flask', 'json', 'jsonschema', 'validation',
+ 'documentation', 'sphinx'],
+ author='Upsight',
+ author_email='[email protected]',
packages=[
'doctor',
'doctor.docs',
|
Updating setup.py. Adding required fields for PyPI.
|
py
|
diff --git a/rocketchat/calls/channels/get_history.py b/rocketchat/calls/channels/get_history.py
index <HASH>..<HASH> 100644
--- a/rocketchat/calls/channels/get_history.py
+++ b/rocketchat/calls/channels/get_history.py
@@ -9,7 +9,7 @@ class GetRoomHistory(RocketChatBase):
endpoint = '/api/v1/channels.history'
def build_endpoint(self, **kwargs):
- if kwargs.has_key('oldest'):
+ if 'oldest' in kwargs:
return '{endpoint}?roomId={room_id}&oldest={oldest}'.format(
endpoint=self.endpoint,
oldest=kwargs.get('oldest'),
|
fixed history method for Python 3 compatibility
|
py
|
diff --git a/napalm/junos/junos.py b/napalm/junos/junos.py
index <HASH>..<HASH> 100644
--- a/napalm/junos/junos.py
+++ b/napalm/junos/junos.py
@@ -383,7 +383,7 @@ class JunOSDriver(NetworkDriver):
def discard_config(self):
"""Discard changes (rollback 0)."""
- self.device.cu.rollback(rb_id=0)
+ self.device.cu.rollback(rb_id=0, ignore_warning=self.ignore_warning)
if not self.lock_disable and not self.session_config_lock:
self._unlock()
if self.config_private:
|
if we are ignoring warnings, also ignore them on discard
|
py
|
diff --git a/graphics/nbinput.py b/graphics/nbinput.py
index <HASH>..<HASH> 100644
--- a/graphics/nbinput.py
+++ b/graphics/nbinput.py
@@ -102,7 +102,7 @@ class _GetchMacCarbon:
# number is converted to an ASCII character with chr() and
# returned.
- (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1]
+ _, msg, _, _, _ = Carbon.Evt.GetNextEvent(0x0008)[1]
return chr(msg & 0x000000FF)
|
Cleaned up nbinput mac a bit
|
py
|
diff --git a/test_wptools.py b/test_wptools.py
index <HASH>..<HASH> 100755
--- a/test_wptools.py
+++ b/test_wptools.py
@@ -20,9 +20,9 @@ class ExtractTestCase(unittest.TestCase):
def test_html_lead(self):
from wptools.extract import html_lead
- data = ("<div>templates</div>"
+ data = ("<div/>"
"<p>lead</p><div>lead</div>"
- "<ol>references</ol>")
+ "<ol/><table/>")
ans = html_lead(data)
self.assertEqual(ans, "<p>lead</p>\n<div>lead</div>")
|
added <table/> to test_html_lead()
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -47,7 +47,8 @@ setup(
],
entry_points={"console_scripts": ["fusesoc = fusesoc.main:main"]},
setup_requires=[
- "setuptools_scm",
+ "setuptools_scm < 7.0; python_version<'3.7'",
+ "setuptools_scm; python_version>='3.7'",
],
install_requires=[
"edalize>=0.2.3",
|
fix: `python<I>` build by using a different `setuptools_scm` version. Fixes: #<I>
|
py
|
diff --git a/vcs/backends/base.py b/vcs/backends/base.py
index <HASH>..<HASH> 100644
--- a/vcs/backends/base.py
+++ b/vcs/backends/base.py
@@ -427,3 +427,12 @@ class BaseInMemoryChangeset(object):
"""
raise NotImplementedError
+ def reset(self):
+ """
+ Resets this instance to initial state (cleans ``added``, ``changed`` and
+ ``removed`` lists).
+ """
+ self.added = []
+ self.changed = []
+ self.removed = []
+
|
Added reset method for InMemoryChangeset at base backend --HG-- branch : workdir
|
py
|
diff --git a/pygam/tests/test_GAM_methods.py b/pygam/tests/test_GAM_methods.py
index <HASH>..<HASH> 100644
--- a/pygam/tests/test_GAM_methods.py
+++ b/pygam/tests/test_GAM_methods.py
@@ -45,11 +45,25 @@ def test_LogisticGAM_accuracy(default):
def test_PoissonGAM_exposure(coal):
"""
- check that we can fit a Poisson GAM on real data
+ check that we can fit a Poisson GAM with exposure, and it scales predictions
"""
X, y = coal
gam = PoissonGAM().fit(X, y, exposure=np.ones_like(y))
- assert(gam._is_fitted)
+ assert((gam.predict(X, exposure=np.ones_like(y)*2) == 2 *gam.predict(X)).all())
+
+def test_PoissonGAM_loglike(coal):
+ """
+ check that our loglikelihood is scaled by exposure
+
+ predictions that are twice as large with twice the exposure
+ should have lower loglikelihood
+ """
+ X, y = coal
+ exposure = np.ones_like(y)
+ gam_high_var = PoissonGAM().fit(X, y * 2, exposure=exposure * 2)
+ gam_low_var = PoissonGAM().fit(X, y, exposure=exposure)
+
+ assert gam_high_var.loglikelihood(X, y * 2, exposure * 2) < gam_low_var.loglikelihood(X, y, exposure)
def test_large_GAM(coal):
"""
|
testing poisson loglikelihood with exposure
|
py
|
diff --git a/dynash/dynash.py b/dynash/dynash.py
index <HASH>..<HASH> 100755
--- a/dynash/dynash.py
+++ b/dynash/dynash.py
@@ -580,6 +580,33 @@ class DynamoDBShell(Cmd):
if self.consumed:
print "consumed units:", item.consumed_units
+ def do_rmattr(self, line):
+ "rmattr [:tablename] [!fieldname:expectedvalue] [-v] {haskkey,[rangekey]} attr [attr...]"
+ table, line = self.get_table_params(line)
+ expected, line = self.get_expected(line)
+
+ args = self.getargs(line)
+
+ if "-v" in args:
+ ret = "ALL_OLD"
+ args.remove("-v")
+ else:
+ ret = None
+
+ hkey = self.get_typed_key_value(table, args[0], True)
+ rkey = self.get_typed_key_value(table, args[1], False) if len(args) > 1 else None
+
+ item = table.new_item(hash_key=hkey, range_key=rkey)
+
+ for arg in args:
+ item.delete_attribute(arg)
+
+ item = item.save(expected_value=expected, return_values=ret)
+ self.pprint(item)
+
+ if self.consumed:
+ print "consumed units:", item.consumed_units
+
def do_scan(self, line):
"""
scan [:tablename] [--batch=#] [-{max}] [-c] [+filter_attribute:filter_value] [attributes,...]
|
Added new command "rmattr" to remove attributes from an item
|
py
|
diff --git a/treeherder/perf/management/commands/remove_duplicate_signatures.py b/treeherder/perf/management/commands/remove_duplicate_signatures.py
index <HASH>..<HASH> 100644
--- a/treeherder/perf/management/commands/remove_duplicate_signatures.py
+++ b/treeherder/perf/management/commands/remove_duplicate_signatures.py
@@ -119,8 +119,8 @@ class Command(BaseCommand):
(id, platform) in
MachinePlatform.objects.values_list('id', 'platform')
}
- signature_hashes_seen = set()
for project in projects:
+ signature_hashes_seen = set()
self.parent_signature_id_map = {}
self.revised_signature_map = {}
i = 0
|
Bug <I> - Another fix to the signature cleanup script
|
py
|
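The underlying bug was a seen-set shared across the project loop: a hash deduplicated in one project was silently skipped in every later one. A stripped-down sketch of the fixed control flow (project names and hashes invented):

def dedupe_per_project(projects):
    for project, hashes in projects.items():
        signature_hashes_seen = set()   # the fix: reset per project
        for h in hashes:
            if h in signature_hashes_seen:
                continue
            signature_hashes_seen.add(h)
            yield project, h

out = list(dedupe_per_project({'autoland': ['a', 'b'], 'try': ['a']}))
assert ('try', 'a') in out   # hoisting the set above the loop would drop this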
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,6 @@ install_reqs = parse_requirements('requirements.txt',
session=False)
# reqs is a list of requirement
-# e.g. ['django==1.5.1', 'mezzanine==1.4.6']
reqs = [str(ir.req) for ir in install_reqs]
setup(
|
removing comment that confuses versioneye
|
py
|
diff --git a/run_deploy_job_wr.py b/run_deploy_job_wr.py
index <HASH>..<HASH> 100755
--- a/run_deploy_job_wr.py
+++ b/run_deploy_job_wr.py
@@ -26,6 +26,7 @@ def main():
'artifacts': {'artifacts': [
'artifacts/machine*/*log*',
'artifacts/*.jenv',
+ 'artifacts/cache.yaml',
]},
'bucket': 'juju-qa-data',
}, config_file)
|
Add cache.yaml to list of files archived to s3.
|
py
|
diff --git a/tornado/ioloop.py b/tornado/ioloop.py
index <HASH>..<HASH> 100644
--- a/tornado/ioloop.py
+++ b/tornado/ioloop.py
@@ -102,7 +102,7 @@ class IOLoop(object):
"""Returns a global IOLoop instance.
Most single-threaded applications have a single, global IOLoop.
- Use this method instead of passing around IOLoop instances
+ Use this method instead of passing around IOLoop instances
throughout your code.
A common pattern for classes that depend on IOLoops is to use
@@ -136,7 +136,7 @@ class IOLoop(object):
self._events.pop(fd, None)
try:
self._impl.unregister(fd)
- except OSError:
+ except (OSError, IOError):
logging.debug("Error deleting fd from IOLoop", exc_info=True)
def start(self):
|
Catch IOError in addition to OSError in IOLoop.remove_handler (friendfeed's epoll module throws OSError, while the one in the standard library throws IOError)
|
py
|
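Background on the widened except clause: before Python 3.3 unified the OS exception hierarchy (PEP 3151), IOError and OSError were distinct classes, and the two epoll modules in circulation raised different ones. A self-contained sketch of the defensive pattern (the fake epoll object is invented):

class FakeEpoll(object):   # stands in for select.epoll / friendfeed's epoll
    def unregister(self, fd):
        raise IOError('fd %d is not registered' % fd)

impl = FakeEpoll()
try:
    impl.unregister(4)
except (OSError, IOError):
    # Covers both variants on Python 2.x; on 3.3+ IOError is an alias of
    # OSError, so the tuple is redundant but harmless.
    pass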
diff --git a/emit/multilang.py b/emit/multilang.py
index <HASH>..<HASH> 100644
--- a/emit/multilang.py
+++ b/emit/multilang.py
@@ -1,6 +1,9 @@
'class to communicate with other languages over stdin/out'
import logging
-import msgpack
+try:
+ import msgpack
+except ImportError:
+ import msgpack_pure
import shlex
from subprocess import Popen, PIPE
|
get the one msgpack reference I missed
|
py
|
diff --git a/angr/storage/paged_memory.py b/angr/storage/paged_memory.py
index <HASH>..<HASH> 100644
--- a/angr/storage/paged_memory.py
+++ b/angr/storage/paged_memory.py
@@ -483,13 +483,12 @@ class SimPagedMemory:
pass
elif isinstance(self._memory_backer, cle.Clemory):
# find permission backer associated with the address
- # fall back to read-write if we can't find any...
- flags = Page.PROT_READ | Page.PROT_WRITE
+ # fall back to default (read-write-maybe-exec) if can't find any
for start, end in self._permission_map:
if start <= new_page_addr < end:
flags = self._permission_map[(start, end)]
+ new_page.permissions = claripy.BVV(flags, 3)
break
- new_page.permissions = claripy.BVV(flags, 3)
# for each clemory backer which intersects with the page, apply its relevant data
for backer_addr, backer in self._memory_backer.backers(new_page_addr):
|
Fix sources of truth for page permissions - fix a rex testcase
|
py
|
diff --git a/core/match_filter.py b/core/match_filter.py
index <HASH>..<HASH> 100644
--- a/core/match_filter.py
+++ b/core/match_filter.py
@@ -469,6 +469,10 @@ def match_filter(template_names, templates, stream, threshold,
# Set up a trace object for the cccsum as this is easier to plot and
     # maintains timing
if plotvar:
+ import matplotlib
+ matplotlib.use('Agg')
+ import matplotlib.pyplot as plt
+ plt.ioff()
stream_plot=copy.deepcopy(stream[0])
# Downsample for plotting
stream_plot.decimate(int(stream[0].stats.sampling_rate/20))
|
Added non-interactive plotting Former-commit-id: <I>d<I>fb<I>adfaae3ea<I>caa2d<I>a8f
|
py
|
diff --git a/molo/core/templatetags/core_tags.py b/molo/core/templatetags/core_tags.py
index <HASH>..<HASH> 100644
--- a/molo/core/templatetags/core_tags.py
+++ b/molo/core/templatetags/core_tags.py
@@ -688,7 +688,11 @@ def get_next_article(context, article):
try:
return next_article.translated_pages.get(language__locale=locale_code)
except:
- return next_article
+ if next_article.language.locale == locale_code or not \
+ SiteSettings.for_site(
+ context['request'].site).show_only_translated_pages:
+ return next_article
+ return None
@register.assignment_tag(takes_context=True)
|
handle show only translated pages for next article section
|
py
|
diff --git a/salt/modules/cp.py b/salt/modules/cp.py
index <HASH>..<HASH> 100644
--- a/salt/modules/cp.py
+++ b/salt/modules/cp.py
@@ -121,7 +121,6 @@ def cache_file(path):
payload['load'] = auth.crypticle.dumps(load)
socket.send_pyobj(payload)
data = auth.crypticle.loads(socket.recv_pyobj())
- print data
if not data:
break
fn_.write(data)
|
Remove print from cp.py
|
py
|
diff --git a/satpy/resample.py b/satpy/resample.py
index <HASH>..<HASH> 100644
--- a/satpy/resample.py
+++ b/satpy/resample.py
@@ -536,9 +536,12 @@ class NativeResampler(BaseResampler):
c_size = max(x[0] for x in d_arr.chunks)
def _calc_chunks(c, c_size):
- return tuple(([c_size] * int(sum(c) // c_size)) +
- [sum(c) % c_size])
- new_chunks = [_calc_chunks(x, c_size // repeats[axis])
+ whole_chunks = [c_size] * int(sum(c) // c_size)
+ remaining = sum(c) - sum(whole_chunks)
+ if remaining:
+ whole_chunks += [remaining]
+ return tuple(whole_chunks)
+ new_chunks = [_calc_chunks(x, int(c_size // repeats[axis]))
for axis, x in enumerate(d_arr.chunks)]
d_arr = d_arr.rechunk(new_chunks)
|
Fix chunk calculations in native resampler for uneven chunk sizes
|
py
|
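A worked example of what changed: when the axis length divides evenly, sum(c) % c_size is 0 and the old expression appended a zero-length trailing chunk; the new code appends a remainder chunk only when one exists. The numbers below are illustrative:

c = (5, 5)     # chunks along one axis, total length 10
c_size = 5

old = tuple([c_size] * int(sum(c) // c_size) + [sum(c) % c_size])
assert old == (5, 5, 0)            # spurious empty trailing chunk

whole_chunks = [c_size] * int(sum(c) // c_size)
remaining = sum(c) - sum(whole_chunks)
if remaining:                      # only keep a real remainder
    whole_chunks += [remaining]
assert tuple(whole_chunks) == (5, 5)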
diff --git a/pnc_cli/buildconfigurations.py b/pnc_cli/buildconfigurations.py
index <HASH>..<HASH> 100644
--- a/pnc_cli/buildconfigurations.py
+++ b/pnc_cli/buildconfigurations.py
@@ -156,13 +156,26 @@ def delete_build_configuration(id=None, name=None):
:param name:
:return:
"""
+
to_delete_id = get_config_id(id, name)
if not to_delete_id:
return
- response = utils.checked_api_call(configs_api, 'delete_specific', id=to_delete_id)
- if response:
- return response.content
+ #ensure that this build configuration is not a dependency of any other build configuration.
+ isDep = False
+ for config in list_build_configurations():
+ dep_ids = [str(val) for val in config.dependency_ids]
+ if dep_ids is not None and to_delete_id in dep_ids:
+ isDep = True
+ logging.error("BuildConfiguration ID {} is a dependency of BuildConfiguration {}.".format(to_delete_id, config.name))
+
+
+ if not isDep:
+ response = utils.checked_api_call(configs_api, 'delete_specific', id=to_delete_id)
+ if response:
+ return response.content
+ else:
+ logging.warn("No action taken.")
@arg("name", help="Name for the new BuildConfiguration.")
|
NCL-<I> Adding in a check for a BuildConfiguration being a dependency of another when attempting to delete. This check will need to be refined once there is a better way to retrieve ALL BuildConfigurations
|
py
|
diff --git a/stl/__about__.py b/stl/__about__.py
index <HASH>..<HASH> 100644
--- a/stl/__about__.py
+++ b/stl/__about__.py
@@ -1,6 +1,6 @@
__package_name__ = 'numpy-stl'
__import_name__ = 'stl'
-__version__ = '2.12.0'
+__version__ = '2.13.0'
__author__ = 'Rick van Hattem'
__author_email__ = '[email protected]'
__description__ = ' '.join('''
|
Incrementing version to <I>
|
py
|
diff --git a/examples/fp.py b/examples/fp.py
index <HASH>..<HASH> 100644
--- a/examples/fp.py
+++ b/examples/fp.py
@@ -47,10 +47,12 @@ factor = @ _ factor mk_map
primary = decimal mk_aref
| ~ _ integer mk_literal
| name mk_call
- | ([<=*+-]) _ mk_op
+ | ([<=>*+-]) !opchar _ mk_op
| \[ _ list \] _ mk_list
| \( _ exp \) _
+opchar = [@/\\?<=>*+-]
+
list = exp , _ list
| exp
|
@@ -117,8 +119,7 @@ iszero == [id, ~0] =.
divisible == mod iszero.
euler1 == iota ?([[id, ~3] divisible, [id, ~5] divisible] or) /+.
-"""
-notyet = r"""
+
sort == [length, ~2] < -> id;
[1, id] distl [?< @2 sort, ?= @2, ?> @2 sort] concat.
"""
|
fix: parser conflict for - and ->; missing > operator
|
py
|
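The !opchar term is a negative lookahead: an operator character matches only when the next character is not itself an operator character, which stops - from consuming the - of ->. A regex rendering of the same idea, purely for illustration (not the parser's own machinery):

import re

# One operator char, not followed by another operator char.
op = re.compile(r'([<=>*+-])(?![@/\\?<=>*+-])')

assert op.match('- x')              # bare minus still matches
assert op.match('-> y') is None     # arrow: the lookahead blocks the minus
assert op.match('> z')              # the newly supported '>' operator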
diff --git a/eqcorrscan/core/match_filter.py b/eqcorrscan/core/match_filter.py
index <HASH>..<HASH> 100644
--- a/eqcorrscan/core/match_filter.py
+++ b/eqcorrscan/core/match_filter.py
@@ -666,8 +666,7 @@ def match_filter(template_names, template_list, st, threshold,
if (tr.stats.station, tr.stats.channel) not in chans[i]:
continue
else:
- pick_tm = detecttime + (tr.stats.starttime -
- detecttime)
+ pick_tm = detecttime #should this pick time be just the detect time
wv_id = WaveformStreamID(network_code=tr.stats.network,
station_code=tr.stats.station,
channel_code=tr.stats.channel)
|
pick time to output to catalog. Should the pick time be just the detect time, rather than detecttime + (template start time - detect time), which reduces to the template start time? Plotting the events in the catalog trims the stream to around the pick time, so using the template start time as the pick time would not cut the stream around where the actual detection was.
|
py
|
diff --git a/custodian/qchem/jobs.py b/custodian/qchem/jobs.py
index <HASH>..<HASH> 100644
--- a/custodian/qchem/jobs.py
+++ b/custodian/qchem/jobs.py
@@ -278,7 +278,7 @@ class QchemJob(Job):
out_file_object.writelines(header_lines)
out_file_object.writelines(sub_out)
if rc < 0 and rc != -99999:
- out_file_object.writelines(["Application {} exit codes: {}\n".format(qc_jobid, rc)])
+ out_file_object.writelines(["Application {} exit codes: {}\n".format(qc_jobid, rc), '\n', '\n'])
if log_file_object:
with open(sub_log_filename) as sub_log_file_object:
sub_log = sub_log_file_object.readlines()
|
append multiple blank lines after exit code
|
py
|
diff --git a/glue_vispy_viewers/scatter/layer_style_widget.py b/glue_vispy_viewers/scatter/layer_style_widget.py
index <HASH>..<HASH> 100644
--- a/glue_vispy_viewers/scatter/layer_style_widget.py
+++ b/glue_vispy_viewers/scatter/layer_style_widget.py
@@ -28,7 +28,7 @@ class ScatterLayerStyleWidget(QtGui.QWidget):
size_vmin = FloatLineProperty('ui.value_size_vmin')
size_vmax = FloatLineProperty('ui.value_size_vmax')
size = FloatLineProperty('ui.value_fixed_size')
- size_scaling = ValueProperty('ui.slider_size_scaling')
+ size_scaling = ValueProperty('ui.slider_size_scaling', value_range=(0.1, 10), log=True)
# Color-related GUI elements
color_mode = CurrentComboTextProperty('ui.combo_color_mode')
|
Make use of log= option in ValueProperty for size_scaling
|
py
|
diff --git a/qface/generator.py b/qface/generator.py
index <HASH>..<HASH> 100644
--- a/qface/generator.py
+++ b/qface/generator.py
@@ -56,8 +56,8 @@ class Generator(object):
logger.info('write {0}'.format(path))
data = self.render(template, context)
if self.hasDifferentContent(data, path):
- if not preserve and path.exists():
- print('skip changed file: {0}'.format(path))
+ if path.exists() and preserve:
+ print('preserve changed file: {0}'.format(path))
else:
print('write changed file: {0}'.format(path))
path.open('w').write(data)
|
fixes logic issue around preserving changed file flag
|
py
|
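The old guard fired "skip" precisely when preserve was off, inverting the intent. A compact sketch of the corrected decision table (file I/O elided):

def decide(path_exists, preserve):
    # Keep an existing file only when the caller asked to preserve it.
    if path_exists and preserve:
        return 'preserve changed file'
    return 'write changed file'

assert decide(True, True) == 'preserve changed file'
assert decide(True, False) == 'write changed file'
assert decide(False, True) == 'write changed file'   # nothing to preserve yet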
diff --git a/docker/api/secret.py b/docker/api/secret.py
index <HASH>..<HASH> 100644
--- a/docker/api/secret.py
+++ b/docker/api/secret.py
@@ -2,12 +2,13 @@ import base64
import six
+from .. import errors
from .. import utils
class SecretApiMixin(object):
@utils.minimum_version('1.25')
- def create_secret(self, name, data, labels=None):
+ def create_secret(self, name, data, labels=None, driver=None):
"""
Create a secret
@@ -15,6 +16,8 @@ class SecretApiMixin(object):
name (string): Name of the secret
data (bytes): Secret data to be stored
labels (dict): A mapping of labels to assign to the secret
+ driver (DriverConfig): A custom driver configuration. If
+ unspecified, the default ``internal`` driver will be used
Returns (dict): ID of the newly created secret
"""
@@ -30,6 +33,14 @@ class SecretApiMixin(object):
'Labels': labels
}
+ if driver is not None:
+ if utils.version_lt(self._version, '1.31'):
+ raise errors.InvalidVersion(
+ 'Secret driver is only available for API version > 1.31'
+ )
+
+ body['Driver'] = driver
+
url = self._url('/secrets/create')
return self._result(
self._post_json(url, data=body), True
|
Add support for secret driver in create_secret
|
py
|
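A hedged usage sketch of the new argument: docker-py ships DriverConfig in docker.types, but the plugin name and its options below are invented, and a reachable daemon with API >= 1.31 is assumed.

import docker
from docker.types import DriverConfig

client = docker.APIClient(version='1.31')
secret = client.create_secret(
    name='db_password',
    data=b'hunter2',
    labels={'env': 'staging'},
    driver=DriverConfig(name='my-secret-plugin',            # hypothetical plugin
                        options={'vault_path': 'kv/db'}),   # hypothetical option
)
print(secret)   # dict holding the ID of the newly created secret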