Columns:
diff: string (lengths 139 to 3.65k)
message: string (lengths 8 to 627)
diff_languages: string (1 distinct value: py)
diff --git a/yarl/__init__.py b/yarl/__init__.py
index <HASH>..<HASH> 100644
--- a/yarl/__init__.py
+++ b/yarl/__init__.py
@@ -10,7 +10,7 @@ import idna
 
 from .quoting import _Quoter, _Unquoter
 
-__version__ = '1.2.5'
+__version__ = '1.2.5a0'
 
 __all__ = ('URL',)
Bump to <I>a0
py
diff --git a/processors/generic_processor.py b/processors/generic_processor.py
index <HASH>..<HASH> 100644
--- a/processors/generic_processor.py
+++ b/processors/generic_processor.py
@@ -809,7 +809,10 @@ class GenericCommitHandler(processor.CommitHandler):
         self.cache_mgr._rename_path(old_path, new_path)
 
     def deleteall_handler(self, filecmd):
-        raise NotImplementedError(self.deleteall_handler)
+        self.debug("deleting all files (and also all directories)")
+        for path, fileid in self.cache_mgr.file_ids.items():
+            del self.inventory[fileid]
+            self.cache_mgr._delete_path(path)
 
     def bzr_file_id_and_new(self, path):
         """Get a Bazaar file identifier and new flag for a path.
Implement the 'deleteall' command.
py
diff --git a/stellar_base/builder.py b/stellar_base/builder.py
index <HASH>..<HASH> 100644
--- a/stellar_base/builder.py
+++ b/stellar_base/builder.py
@@ -96,6 +96,7 @@ class Builder(object):
     def append_path_payment_op(self, destination, send_code, send_issuer, send_max,
                                dest_code, dest_issuer, dest_amount, path, source=None):
         # path: a list of asset tuple which contains code and issuer, [(code,issuer),(code,issuer)]
+        # for native asset you can delivery ('xlm','')
         send_asset = Asset(send_code, send_issuer)
         dest_asset = Asset(dest_code, dest_issuer)
 
@@ -112,7 +113,7 @@
             'dest_amount': str(dest_amount),
             'path': assets
         }
-        op = Payment(opts)
+        op = PathPayment(opts)
         self.append_op(op)
 
     def append_allow_trust_op(self, trustor, asset_code, authorize, source=None):
[critical] fix pathpayment in builder
py
diff --git a/arcrest/gptypes.py b/arcrest/gptypes.py
index <HASH>..<HASH> 100644
--- a/arcrest/gptypes.py
+++ b/arcrest/gptypes.py
@@ -29,7 +29,7 @@ class GPMultiValue(object):
         return cls
     @property
     def _json_struct(self):
-        return [x._json_struct for x in self._values]
+        return [getattr(x, '_json_struct', x) for x in self._values]
    @classmethod
     def fromJson(cls, val):
         return cls(val)
Allow bare values in multivalue (such as float)
py
diff --git a/nats/aio/client.py b/nats/aio/client.py
index <HASH>..<HASH> 100644
--- a/nats/aio/client.py
+++ b/nats/aio/client.py
@@ -49,7 +49,7 @@ INBOX_PREFIX_LEN = len(INBOX_PREFIX) + 22 + 1
 DEFAULT_PENDING_SIZE = 1024 * 1024
 DEFAULT_BUFFER_SIZE = 32768
 DEFAULT_RECONNECT_TIME_WAIT = 2  # in seconds
-DEFAULT_MAX_RECONNECT_ATTEMPTS = 10
+DEFAULT_MAX_RECONNECT_ATTEMPTS = 60
 DEFAULT_PING_INTERVAL = 120  # in seconds
 DEFAULT_MAX_OUTSTANDING_PINGS = 2
 DEFAULT_MAX_PAYLOAD_SIZE = 1048576
Set default max reconnects to <I> as in Go client
py
diff --git a/dockerpty/pty.py b/dockerpty/pty.py
index <HASH>..<HASH> 100644
--- a/dockerpty/pty.py
+++ b/dockerpty/pty.py
@@ -16,6 +16,7 @@
 
 import sys
 import signal
+from ssl import SSLError
 
 import dockerpty.io as io
 import dockerpty.tty as tty
@@ -226,5 +227,9 @@ class PseudoTerminal(object):
         self.resize()
         while True:
             _ready = io.select(pumps, timeout=60)
-            if all([p.flush() is None for p in pumps]):
-                break
+            try:
+                if all([p.flush() is None for p in pumps]):
+                    break
+            except SSLError as e:
+                if 'The operation did not complete' not in e.strerror:
+                    raise e
Catch errors caused by SSL sockets not having data ready

Just because `select()` has returned doesn't mean the socket has actual data ready: <URL>
py
diff --git a/pyvisa-py/highlevel.py b/pyvisa-py/highlevel.py
index <HASH>..<HASH> 100644
--- a/pyvisa-py/highlevel.py
+++ b/pyvisa-py/highlevel.py
@@ -312,7 +312,7 @@ class PyVisaLibrary(highlevel.VisaLibraryBase):
         if ret[1] < 0:
             raise errors.VisaIOError(ret[1])
 
-        return ret[0]  # return number of bytes written
+        return ret
 
     def get_attribute(self, session, attribute):
         """Retrieves the state of an attribute.
write() returns bytes written & status code
py
diff --git a/delphi/AnalysisGraph.py b/delphi/AnalysisGraph.py
index <HASH>..<HASH> 100644
--- a/delphi/AnalysisGraph.py
+++ b/delphi/AnalysisGraph.py
@@ -206,10 +206,6 @@ class AnalysisGraph(nx.DiGraph):
                 )
             ).resample(res)[0]
 
-        # Algorithm:
-        # for each pair of nodes i, j in the graph:
-        #     for each path in the set of shortest simple paths between i and j:
-        #         p <- product of betas for each link along the path.
         for e in self.edges(data=True):
             e[2]["ConditionalProbability"] = constructConditionalPDF(gb, rs, e)
             e[2]["betas"] = np.tan(
Removed comment about algorithms in AnalysisGraph.py
py
diff --git a/pandas/io/tests/test_common.py b/pandas/io/tests/test_common.py
index <HASH>..<HASH> 100644
--- a/pandas/io/tests/test_common.py
+++ b/pandas/io/tests/test_common.py
@@ -1,7 +1,6 @@
 """
     Tests for the pandas.io.common functionalities
 """
-import nose
 import mmap
 import os
 from os.path import isabs
@@ -98,15 +97,18 @@ class TestMMapWrapper(tm.TestCase):
                                       'test_mmap.csv')
 
     def test_constructor_bad_file(self):
-        if is_platform_windows():
-            raise nose.SkipTest("skipping construction error messages "
-                                "tests on windows")
-
         non_file = StringIO('I am not a file')
         non_file.fileno = lambda: -1
 
-        msg = "Invalid argument"
-        tm.assertRaisesRegexp(mmap.error, msg, common.MMapWrapper, non_file)
+        # the error raised is different on Windows
+        if is_platform_windows():
+            msg = "The parameter is incorrect"
+            err = OSError
+        else:
+            msg = "Invalid argument"
+            err = mmap.error
+
+        tm.assertRaisesRegexp(err, msg, common.MMapWrapper, non_file)
 
         target = open(self.mmap_file, 'r')
         target.close()
TST: Fix MMapWrapper init test for Windows

Turns out Windows errors differently when an invalid `fileno` is passed into the `mmap` constructor, so there's no need to skip the test (xref: <I>b<I>).
py
diff --git a/tpot/gp_deap.py b/tpot/gp_deap.py
index <HASH>..<HASH> 100644
--- a/tpot/gp_deap.py
+++ b/tpot/gp_deap.py
@@ -80,16 +80,9 @@ def varOr(population, toolbox, lambda_, cxpb, mutpb):
     for _ in range(lambda_):
         op_choice = np.random.random()
         if op_choice < cxpb:  # Apply crossover
-
-            idxs = np.random.choice(crossover_eligible_individuals, size=2, replace=False)
-            ind1, ind2 = toolbox.clone(population[idxs[0]]), toolbox.clone(population[idxs[1]])
-            ind_str = str(ind1)
-            num_loop = 0
-            while ind_str == str(ind1) and num_loop < MAX_MUT_LOOPS:
-                ind1, ind2 = toolbox.mate(ind1, ind2)
-                num_loop += 1
-            if ind_str != str(ind1):  # check if crossover happened
-                del ind1.fitness.values
+            idxs = np.random.choice(crossover_eligible_individuals, size=2, replace=False)
+            ind1, _ = toolbox.mate(ind1, ind2)
+            del ind1.fitness.values
             offspring.append(ind1)
         elif op_choice < cxpb + mutpb:  # Apply mutation
             idx = np.random.randint(0, len(population))
Refactored the varOr crossover part, since our crossover function now (1) already guarantees a unique new individual, if possible within <I> iterations, and (2) for this purpose also copies the individuals internally (something which should have been done in the previous while loop instead).
py
diff --git a/insights/specs/default.py b/insights/specs/default.py
index <HASH>..<HASH> 100644
--- a/insights/specs/default.py
+++ b/insights/specs/default.py
@@ -669,6 +669,7 @@ class DefaultSpecs(Specs):
     xinetd_conf = glob_file(["/etc/xinetd.conf", "/etc/xinetd.d/*"])
     yum_conf = simple_file("/etc/yum.conf")
     yum_list_available = simple_command("yum -C --noplugins list available")
+    yum_log = simple_file("/var/log/yum.log")
     yum_repolist = simple_command("/usr/bin/yum -C --noplugins repolist")
     yum_repos_d = glob_file("/etc/yum.repos.d/*.repo")
     zipl_conf = simple_file("/etc/zipl.conf")
Add item yum_log (#<I>)
py
diff --git a/billy/importers/filters.py b/billy/importers/filters.py
index <HASH>..<HASH> 100644
--- a/billy/importers/filters.py
+++ b/billy/importers/filters.py
@@ -2,6 +2,13 @@ import re
 import importlib
 
 
+def filter_by_array(filter_array, obj):
+    for fltr in filter_array:
+        for key in filter_array[fltr]:
+            obj = filter_object(fltr, key, obj)
+    return obj
+
+
 def filter_object(filter_path, object_path, obj):
     module, func = filter_path.rsplit(".", 1)
     mod = importlib.import_module(module)
@@ -14,7 +21,10 @@ def run_filter(fltr, object_path, obj):
         root, new_path = object_path.split(".", 1)
         obj[root] = run_filter(fltr, new_path, obj[root])
         return obj
-    fltr_obj = obj[object_path]
+    try:
+        fltr_obj = obj[object_path]
+    except KeyError:
+        return obj  # Eek, bad object path. Bail.
 
     if isinstance(fltr_obj, basestring):
         obj[object_path] = fltr(fltr_obj)
Adding a catch around the object_path assignment
py
diff --git a/sphinx_autodoc_annotation.py b/sphinx_autodoc_annotation.py
index <HASH>..<HASH> 100644
--- a/sphinx_autodoc_annotation.py
+++ b/sphinx_autodoc_annotation.py
@@ -12,7 +12,11 @@ def get_class_link(obj):
     return None
 
 def add_annotation_content(documenter):
-    sig = inspect.signature(documenter.object)
+    try:
+        sig = inspect.signature(documenter.object)
+    except ValueError:
+        # Can't extract signature, do nothing
+        return
     for param in sig.parameters.values():
         arg_link = get_class_link(param.annotation)
         if arg_link:
Don't choke on objects with no signature
py
diff --git a/buildbot/status/builder.py b/buildbot/status/builder.py
index <HASH>..<HASH> 100644
--- a/buildbot/status/builder.py
+++ b/buildbot/status/builder.py
@@ -2023,6 +2023,7 @@ class SlaveStatus:
 
     admin = None
     host = None
+    version = None
     connected = False
     graceful_shutdown = False
Make sure version is always defined in SlaveStatus, just like host and admin.
py
diff --git a/examples/json.py b/examples/json.py
index <HASH>..<HASH> 100644
--- a/examples/json.py
+++ b/examples/json.py
@@ -50,8 +50,9 @@ def test():
             "int": 1,
             "string": "hello",
             "a list": [1, 2, 3],
-            "escapes": "\n",
-            "nested": {"x": "y"}
+            "escapes": "\n \u24D2",
+            "nested": {"x": "y"},
+            "other": [true, false, null]
         }
     """
     )
@@ -59,8 +60,9 @@ def test():
         "int": 1,
         "string": "hello",
         "a list": [1, 2, 3],
-        "escapes": "\n",
+        "escapes": "\n ⓒ",
         "nested": {"x": "y"},
+        "other": [True, False, None],
     }
     )
Expanded tests for JSON example.
py
diff --git a/resolwe/flow/migrations/0028_add_data_location.py b/resolwe/flow/migrations/0028_add_data_location.py
index <HASH>..<HASH> 100644
--- a/resolwe/flow/migrations/0028_add_data_location.py
+++ b/resolwe/flow/migrations/0028_add_data_location.py
@@ -5,7 +5,7 @@ from __future__ import unicode_literals
 
 import os
 
 from django.conf import settings
-from django.db import migrations, models, transaction
+from django.db import connection, migrations, models, transaction
 import django.db.models.deletion
 
@@ -21,6 +21,12 @@ def set_data_location(apps, schema_editor):
         data_location = DataLocation.objects.create(id=data.id, subpath=str(data.id))
         data_location.data.add(data)
 
+    # Increment DataLocation id's sequence
+    if DataLocation.objects.exists():
+        max_id = DataLocation.objects.order_by('id').last().id
+        with connection.cursor() as cursor:
+            cursor.execute("ALTER SEQUENCE flow_datalocation_id_seq RESTART WITH %s;", [str(max_id + 1)])
+
 
 class Migration(migrations.Migration):
Increment DataLocation sequence in migration
py
diff --git a/txaws/service.py b/txaws/service.py
index <HASH>..<HASH> 100644
--- a/txaws/service.py
+++ b/txaws/service.py
@@ -52,7 +52,7 @@ class AWSServiceEndpoint(object):
             uri = "%s:%s" % (uri, self.port)
         return uri + self.path
 
-
+# XXX needs tests!
 class AWSServiceRegion(object):
     """
     This object represents a collection of client factories that use the
Added a TODO comment for tests.
py
diff --git a/sh.py b/sh.py
index <HASH>..<HASH> 100644
--- a/sh.py
+++ b/sh.py
@@ -629,7 +629,7 @@ STDERR = -2
 # Process open = Popen
 # Open Process = OProc
 class OProc(object):
-    _procs_to_cleanup = []
+    _procs_to_cleanup = set()
     _registered_cleanup = False
     _default_window_size = (24, 80)
 
@@ -773,7 +773,7 @@ class OProc(object):
             if stderr is not STDOUT: os.close(self._slave_stderr_fd)
 
         if logging_enabled: self.log.debug("started process")
-        if not persist: OProc._procs_to_cleanup.append(self)
+        if not persist: OProc._procs_to_cleanup.add(self)
 
         if self.call_args["tty_in"]:
@@ -924,7 +924,6 @@ class OProc(object):
     def _cleanup_procs():
         for proc in OProc._procs_to_cleanup:
             proc.kill()
-            proc.wait()
 
     def _handle_exit_code(self, exit_code):
@@ -982,6 +981,8 @@ class OProc(object):
         self._input_thread.join()
         self._output_thread.join()
 
+        OProc._procs_to_cleanup.discard(self)
+
         return self.exit_code
using set instead of list for processes to cleanup
py
diff --git a/amino/maybe.py b/amino/maybe.py
index <HASH>..<HASH> 100644
--- a/amino/maybe.py
+++ b/amino/maybe.py
@@ -1,5 +1,4 @@
-from typing import TypeVar, Generic, Callable, Union, Any
-from typing import Tuple  # NOQA
+from typing import TypeVar, Generic, Callable, Union, Any, cast
 from functools import wraps, partial
 from operator import eq, is_not
 import inspect
@@ -7,7 +6,7 @@ import traceback
 
 from amino import boolean
 from amino.tc.base import Implicits
-from amino.func import call_by_name, I
+from amino.func import call_by_name, I, curried
 
 A = TypeVar('A')
 B = TypeVar('B')
@@ -50,6 +49,16 @@ class Maybe(Generic[A], Implicits, implicits=True):
     def getattr(obj, attr):
         return Maybe.check(getattr(obj, attr, None))
 
+    @staticmethod
+    @curried
+    def iff(cond: bool, a: Union[A, Callable[[], A]]) -> 'Maybe[A]':
+        return cast(Maybe, Just(call_by_name(a))) if cond else Empty()
+
+    @staticmethod
+    @curried
+    def iff_m(cond: bool, a: Union[A, Callable[[], 'Maybe[A]']]) -> 'Maybe[A]':
+        return cast(Maybe, call_by_name(a)) if cond else Empty()
+
     @property
     def _get(self) -> Union[A, None]:
         pass
`Maybe.iff{,_m}`
py
diff --git a/cassandra/metadata.py b/cassandra/metadata.py
index <HASH>..<HASH> 100644
--- a/cassandra/metadata.py
+++ b/cassandra/metadata.py
@@ -924,7 +924,7 @@ class Token(object):
         return self.value == other.value
 
     def __hash__(self):
-        return self.value
+        return hash(self.value)
 
     def __repr__(self):
         return "<%s: %r>" % (self.__class__.__name__, self.value)
Fix Token's __hash__ method so that BytesToken instances can be added to token_to_host_owner (currently the driver is broken on ByteOrderedPartitioner)
py
diff --git a/tornado/queues.py b/tornado/queues.py
index <HASH>..<HASH> 100644
--- a/tornado/queues.py
+++ b/tornado/queues.py
@@ -232,6 +232,16 @@ class Queue(Generic[_T]):
         scale as `tornado.ioloop.IOLoop.time`, normally `time.time`), or a
         `datetime.timedelta` object for a deadline relative to the
         current time.
+
+        .. note::
+
+           The ``timeout`` argument of this method differs from that
+           of the standard library's `queue.Queue.get`. That method
+           interprets numeric values as relative timeouts; this one
+           interprets them as absolute deadlines and requires
+           ``timedelta`` objects for relative timeouts (consistent
+           with other timeouts in Tornado).
+
         """
         future = Future()  # type: Future[_T]
         try:
queues: Add a note to Queue.get about difference from stdlib

Tornado's interpretation of the timeout argument differs from the stdlib's queue. Fixes #<I>
py
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index <HASH>..<HASH> 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -511,7 +511,7 @@ class PythonLanguage(object):
                 config.run,
                 timeout_seconds=5*60,
                 environ=dict(list(environment.items()) +
-                             [('GRPC_PYTHON_TESTRUNNER_FILTER', suite_name)]),
+                             [('GRPC_PYTHON_TESTRUNNER_FILTER', str(suite_name))]),
                 shortname='%s.test.%s' % (config.name, suite_name),)
             for suite_name in tests_json
             for config in self.pythons]
Fix test runner failures for Python on Windows
py
diff --git a/json2html/jsonconv.py b/json2html/jsonconv.py
index <HASH>..<HASH> 100644
--- a/json2html/jsonconv.py
+++ b/json2html/jsonconv.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 '''
 JSON 2 HTML convertor
 =====================
@@ -12,7 +14,6 @@ Michel Müller 2015-1-31: Added bootstrap option, converting table-like JSON
 LICENSE: MIT
 --------
 '''
-# -*- coding: utf-8 -*-
 
 import json
 import ordereddict
moved the coding hashbang to the top
py
diff --git a/quantrisk/timeseries.py b/quantrisk/timeseries.py
index <HASH>..<HASH> 100644
--- a/quantrisk/timeseries.py
+++ b/quantrisk/timeseries.py
@@ -615,9 +615,12 @@ def extract_interesting_date_ranges(df_rets):
     df_rets_dupe.index = df_rets_dupe.index.map(pd.Timestamp)
     ranges = OrderedDict()
     for name, (start, end) in periods.iteritems():
-        period = df_rets_dupe.loc[start:end]
-        if len(period) == 0:
+        try:
+            period = df_rets_dupe.loc[start:end]
+            if len(period) == 0:
+                continue
+            ranges[name] = period
+        except:
             continue
-        ranges[name] = period
 
     return ranges
BUG Added try/except to pass over out of bounds date regions

df_rets_dupe.loc[start:end] was giving me an error on my local machine when the date was out of bounds. This solves the problem for the time being.
py
diff --git a/did/plugins/jira.py b/did/plugins/jira.py
index <HASH>..<HASH> 100644
--- a/did/plugins/jira.py
+++ b/did/plugins/jira.py
@@ -161,6 +161,11 @@ class JiraStats(StatsGroup):
             raise ReportError(
                 "No Jira url set in the [{0}] section".format(option))
         self.url = config["url"].rstrip("/")
+        # Optional SSO url
+        if "sso_url" in config:
+            self.sso_url = config["sso_url"]
+        else:
+            self.sso_url = self.url + "/step-auth-gss"
         # Make sure we have project set
         if "project" not in config:
             raise ReportError(
@@ -193,5 +198,6 @@ class JiraStats(StatsGroup):
                 urllib2.HTTPRedirectHandler,
                 urllib2.HTTPCookieProcessor(cookie),
                 urllib2_kerberos.HTTPKerberosAuthHandler)
-            self._session.open(self.url + "/step-auth-gss")
+            log.debug(u"Connecting to {0}".format(self.sso_url))
+            self._session.open(self.sso_url)
         return self._session
enabling logging into JBoss Jira

self.url + "/step-auth-gss" returns <I> on issues.jboss.org, so an optional sso_url parameter was added to the Jira plugin for jboss.org. It can be configured as follows:

[jboss.org]
type = jira
...
sso_url = <URL>
py
diff --git a/cleverhans/utils_tf.py b/cleverhans/utils_tf.py
index <HASH>..<HASH> 100644
--- a/cleverhans/utils_tf.py
+++ b/cleverhans/utils_tf.py
@@ -81,11 +81,6 @@ def tf_model_train(sess, x, y, predictions, X_train, Y_train, save=False,
         prev = time.time()
         for batch in range(nb_batches):
-            if batch % 100 == 0 and batch > 0:
-                print("Batch " + str(batch))
-                cur = time.time()
-                print("\tTook " + str(cur - prev) + " seconds")
-                prev = cur
 
             # Compute batch start and end indices
             start, end = batch_indices(batch, len(X_train), FLAGS.batch_size)
@@ -95,6 +90,9 @@ def tf_model_train(sess, x, y, predictions, X_train, Y_train, save=False,
                                           y: Y_train[start:end],
                                           keras.backend.learning_phase(): 1})
         assert end >= len(X_train)  # Check that all examples were used
+        cur = time.time()
+        print("\tEpoch took " + str(cur - prev) + " seconds")
+        prev = cur
         if evaluate is not None:
             evaluate()
less frequent printing now that training is faster
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,22 +1,20 @@
 #!/usr/bin/env python
 
 try:
-    raise ImportError
     from setuptools import setup
 except ImportError:
     from distutils.core import setup
 
 from sys import version_info
 
-# First attempt to generate extension source with cffi,
-# then fallback to already generated source.
 try:
     from librtmp_ffi.verifier import verifier
     extension = verifier.get_extension()
 except ImportError:
-    from distutils.extension import Extension
-    extension = Extension("_binding", ["librtmp_ffi/__pycache__/_binding.c"],
-                          libraries=["rtmp"])
+    import sys
+    sys.stderr.write("Error: CFFI (required for setup) is not available.\n")
+    sys.stderr.write("Please use 'pip install cffi', or equivalent.\n")
+    sys.exit(1)
 
 install_requires = ["cffi>=0.6"]
setup.py: Remove the use of pre-generated _binding.c.
py
diff --git a/vex/main.py b/vex/main.py
index <HASH>..<HASH> 100644
--- a/vex/main.py
+++ b/vex/main.py
@@ -2,6 +2,7 @@
 """
 import sys
 import os
+import shutil
 from vex import config
 from vex.options import get_options
 from vex.run import get_environ, run
@@ -161,6 +162,12 @@ def _main(environ, argv):
         make_path = os.path.abspath(os.path.join(ve_base, ve_name))
         if options.python is None:
             options.python = vexrc.get_default_python(environ)
+        if options.python and hasattr(shutil, "which"):
+            if not shutil.which(options.python):
+                raise exceptions.InvalidVirtualenv(
+                    "the python specified in vexrc isn't executable: "
+                    "{!r}".format(options.python)
+                )
         handle_make(environ, options, make_path)
         ve_path = make_path
     elif options.path:
pre-emptively warn if vexrc python looks like it won't run

otherwise virtualenv generates what would be a confusing error, e.g.: 'The path squabbles (from --python=squabbles) does not exist'
py
diff --git a/luigi/worker.py b/luigi/worker.py
index <HASH>..<HASH> 100644
--- a/luigi/worker.py
+++ b/luigi/worker.py
@@ -785,7 +785,7 @@ class Worker(object):
             if isinstance(dependency, Target):
                 raise Exception('requires() can not return Target objects. Wrap it in an ExternalTask class')
             elif not isinstance(dependency, Task):
-                raise Exception('requires() must return Task objects')
+                raise Exception('requires() must return Task objects but {} is a {}'.format(dependency, type(dependency)))
 
     def _check_complete_value(self, is_complete):
         if is_complete not in (True, False):
More verbose exceptions for mistyped dependencies (#<I>)
py
diff --git a/visidata/mainloop.py b/visidata/mainloop.py
index <HASH>..<HASH> 100644
--- a/visidata/mainloop.py
+++ b/visidata/mainloop.py
@@ -133,6 +133,10 @@ def mainloop(self, scr):
                 return
 
         sheet = self.activeSheet
+
+        if not sheet:
+            continue
+
         threading.current_thread().sheet = sheet
         vd.drawThread = threading.current_thread()
[mainloop-] hold progression to sheet display until sheet is loaded
py
diff --git a/tests/performanceplatform/collector/webtrends/test_reports.py b/tests/performanceplatform/collector/webtrends/test_reports.py
index <HASH>..<HASH> 100644
--- a/tests/performanceplatform/collector/webtrends/test_reports.py
+++ b/tests/performanceplatform/collector/webtrends/test_reports.py
@@ -37,7 +37,7 @@ class TestCollector(unittest.TestCase):
         ".reports.requests_with_backoff.get")
     def test_collect_parse_and_push(self, mock_get, mock_post):
         mock_get().json.return_value = get_fake_response()
-        query = {'frequency': 'daily', 'report_id': 'whoop'}
+        query = {'report_id': 'whoop'}
         options = {
             'row_type_name': 'browser',
             'mappings': {'Visits': 'visitors'},
@@ -76,12 +76,6 @@ class TestCollector(unittest.TestCase):
         ]
         mock_post.assert_called_once_with(posted_data, chunk_size=100)
 
-    @patch(
-        "performanceplatform.collector.webtrends"
-        ".reports.requests_with_backoff.get")
-    def test_collect_when_specified_start_and_end_and_hourly(self, mock_get):
-        pass
-
     @patch("performanceplatform.collector.webtrends"
            ".reports.requests_with_backoff.get")
     def test_collect_when_specified_start_and_end_and_daily(self, mock_get):
Remove unnecessary code from tests

- As far as I can tell having `frequency` in the query does nothing
- There's an empty test - I can't figure out whether this test is needed or not based on its name
py
diff --git a/werkzeug/datastructures.py b/werkzeug/datastructures.py
index <HASH>..<HASH> 100644
--- a/werkzeug/datastructures.py
+++ b/werkzeug/datastructures.py
@@ -9,6 +9,7 @@
     :license: BSD, see LICENSE for more details.
 """
 import re
+import sys
 import codecs
 import mimetypes
 from itertools import repeat
@@ -2461,7 +2462,8 @@ class FileStorage(object):
         # This might not be if the name attribute is bytes due to the
         # file being opened from the bytes API.
         if not PY2 and isinstance(filename, bytes):
-            filename = filename.decode('utf-8', 'replace')
+            filename = filename.decode(sys.getfilesystemencoding(),
+                                       'replace')
         self.filename = filename
 
         if headers is None:
utf-8 to filesystem encoding
py
diff --git a/__init__.py b/__init__.py
index <HASH>..<HASH> 100644
--- a/__init__.py
+++ b/__init__.py
@@ -9,18 +9,30 @@
 global tzinfo
 tzinfo = pytz.UTC
 
 month_to_number = {
+    "Jan": 1,
     "January": 1,
+    "Feb": 2,
     "Febuary": 2,
     "February": 2,
+    "Mar": 3,
     "March": 3,
+    "Apr": 4,
     "April": 4,
     "May": 5,
+    "Jun": 6,
     "June": 6,
+    "Jul": 7,
     "July": 7,
+    "Aug": 8,
     "August": 8,
+    "Sep": 9,
+    "Sept": 9,
     "September": 9,
+    "Oct": 10,
     "October": 10,
+    "Nov": 11,
     "November": 11,
+    "Dec": 12,
     "December": 12
 }
added abbreviated months to dict
py
diff --git a/ledgerblue/comm.py b/ledgerblue/comm.py
index <HASH>..<HASH> 100644
--- a/ledgerblue/comm.py
+++ b/ledgerblue/comm.py
@@ -120,7 +120,7 @@ class HIDDongleHIDAPI(Dongle, DongleWait):
         response = result[dataStart : dataLength + dataStart]
         if self.debug:
             print("HID <= %s%.2x" % (hexstr(response), sw))
-        if sw != 0x9000:
+        if sw != 0x9000 and ((sw >> 8) != 0x61):
             possibleCause = "Unknown reason"
             if sw == 0x6982:
                 possibleCause = "Have you uninstalled the existing CA with resetCustomCA first?"
Also accept the generic status <I>xx
py
diff --git a/quickcache/quickcache.py b/quickcache/quickcache.py
index <HASH>..<HASH> 100644
--- a/quickcache/quickcache.py
+++ b/quickcache/quickcache.py
@@ -119,6 +119,8 @@ class QuickCache(object):
         elif isinstance(value, set):
             return 'S' + self._hash(
                 ','.join(sorted(map(self._serialize_for_key, value))))
+        elif value is None:
+            return 'N'
         else:
             raise ValueError('Bad type "{}": {}'.format(type(value), value))
make sure quickcache can accept None as an arg
py
diff --git a/seleniumbase/console_scripts/rich_helper.py b/seleniumbase/console_scripts/rich_helper.py
index <HASH>..<HASH> 100755
--- a/seleniumbase/console_scripts/rich_helper.py
+++ b/seleniumbase/console_scripts/rich_helper.py
@@ -37,7 +37,9 @@ def display_code(code):
 def fix_emoji_spacing(code):
     try:
         # Fix the display width of certain emojis that take up two spaces
-        double_width_emojis = ["🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️"]
+        double_width_emojis = [
+            "🗺️", "🖼️", "🗄️", "⏺️", "♻️", "🗂️", "🖥️", "🕹️", "🎞️"
+        ]
         for emoji in double_width_emojis:
             if emoji in code:
                 code = code.replace(emoji, emoji + " ")
Update double_width_emojis list to improve "rich" printing
py
diff --git a/django_webtest_tests/testapp_tests/tests.py b/django_webtest_tests/testapp_tests/tests.py
index <HASH>..<HASH> 100644
--- a/django_webtest_tests/testapp_tests/tests.py
+++ b/django_webtest_tests/testapp_tests/tests.py
@@ -235,7 +235,7 @@ class TestSession(WebTest):
     def test_session_not_set(self):
         response = self.app.get('/')
         self.assertEqual(response.status_int, 200)
-        self.assertEquals({}, self.app.session)
+        self.assertEqual({}, self.app.session)
 
     def test_sessions_disabled(self):
         from django.conf import settings
@@ -246,8 +246,8 @@ class TestSession(WebTest):
 
         response = self.app.get('/')
         self.assertEqual(response.status_int, 200)
-        self.assertEquals({}, self.app.session)
+        self.assertEqual({}, self.app.session)
 
     def test_session_not_empty(self):
         response = self.app.get(reverse('set_session'))
-        self.assertEquals('foo', self.app.session['test'])
+        self.assertEqual('foo', self.app.session['test'])
use assertEqual instead of deprecated assertEquals
py
diff --git a/tests/test_hub.py b/tests/test_hub.py
index <HASH>..<HASH> 100644
--- a/tests/test_hub.py
+++ b/tests/test_hub.py
@@ -9,6 +9,7 @@
 from __future__ import absolute_import, print_function
 
 import os
+import sys
 import gc
 import time
 import signal
@@ -44,6 +45,10 @@ class TestHub(UnitTest):
 
     def test_sigint(self):
         # The Hub should exit on CTRL-C (SIGINT).
+        # On windows, sending SIGINT kills all processes attached to a console,
+        # including the test driver.
+        if sys.platform.startswith('win'):
+            raise SkipTest('test skipped on Windows')
         def send_sigint():
             time.sleep(0.01)
             os.kill(os.getpid(), signal.SIGINT)
Don't run SIGINT test on Windows.
py
diff --git a/flask_sqlalchemy/__init__.py b/flask_sqlalchemy/__init__.py
index <HASH>..<HASH> 100644
--- a/flask_sqlalchemy/__init__.py
+++ b/flask_sqlalchemy/__init__.py
@@ -805,10 +805,13 @@ class SQLAlchemy(object):
         of an application not initialized that way or connections will
         leak.
         """
-        if app.config.get('SQLALCHEMY_DATABASE_URI') is None:
-            warnings.warn(UserWarning('SQLALCHEMY_DATABASE_URI not set. Defaulting to sqlite.'))
+        if 'SQLALCHEMY_DATABASE_URI' not in app.config:
+            warnings.warn(
+                'SQLALCHEMY_DATABASE_URI not set. Defaulting to '
+                '"sqlite:///:memory:".'
+            )
 
-        app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite://')
+        app.config.setdefault('SQLALCHEMY_DATABASE_URI', 'sqlite:///:memory:')
         app.config.setdefault('SQLALCHEMY_BINDS', None)
         app.config.setdefault('SQLALCHEMY_NATIVE_UNICODE', None)
         app.config.setdefault('SQLALCHEMY_ECHO', False)
be explicit about sqlite memory uri
py
diff --git a/awkward/version.py b/awkward/version.py
index <HASH>..<HASH> 100644
--- a/awkward/version.py
+++ b/awkward/version.py
@@ -30,7 +30,7 @@
 
 import re
 
-__version__ = "0.2.0"
+__version__ = "0.2.1"
 version = __version__
 version_info = tuple(re.split(r"[-\.]", __version__))
test pushing to finish-implementation branch
py
diff --git a/pyuploadcare/resources/file.py b/pyuploadcare/resources/file.py
index <HASH>..<HASH> 100644
--- a/pyuploadcare/resources/file.py
+++ b/pyuploadcare/resources/file.py
@@ -87,14 +87,16 @@ class File:
     def __str__(self):
         return self.cdn_url
 
-    def set_effects(self, effects: Union[str, ImageTransformation]) -> None:
-        self.default_effects = str(effects)
+    def set_effects(
+        self, effects: Optional[Union[str, ImageTransformation]] = None
+    ) -> None:
+        effects = str(effects) if effects else ""
+        self.default_effects = effects
 
     def _build_effects(
         self, effects: Optional[Union[str, ImageTransformation]] = None
     ):
-        effects = str(effects)
-
+        effects = str(effects) if effects else ""
         if self.default_effects is not None:
             effects = (
                 f"{self.default_effects}-/{effects}"
fix file copy (#<I>)

* fixed building cdn path for file copy
py
diff --git a/benchexec/containerexecutor.py b/benchexec/containerexecutor.py
index <HASH>..<HASH> 100644
--- a/benchexec/containerexecutor.py
+++ b/benchexec/containerexecutor.py
@@ -326,7 +326,7 @@ class ContainerExecutor(baseexecutor.BaseExecutor):
         # If the current directory is within one of the bind mounts we create,
         # we need to cd into this directory again, otherwise we would not see the bind mount,
         # but the directory behind it. Thus we always set cwd to force a change of directory.
-        cwd = cwd or os.path.abspath(os.curdir)
+        cwd = os.path.abspath(cwd or os.curdir)
 
         def grandchild():
             """Setup everything inside the process that finally exec()s the tool."""
Make sure to always cd to an absolute path in container, otherwise it does not have the desired effect.
py
diff --git a/flask_cors.py b/flask_cors.py
index <HASH>..<HASH> 100644
--- a/flask_cors.py
+++ b/flask_cors.py
@@ -34,6 +34,8 @@ CONFIG_OPTIONS = ['CORS_ORIGINS', 'CORS_METHODS', 'CORS_HEADERS',
                   'CORS_MAX_AGE', 'CORS_SEND_WILDCARD', 'CORS_ALWAYS_SEND',
                   'CORS_AUTOMATIC_OPTIONS', 'CORS_VARY_HEADER']
 
+FLASK_CORS_EVALUATED = '_FLASK_CORS_EVALUATED'
+
 _defaults_dict = dict(origins='*',
                       always_send=True,
                       automatic_options=True,
@@ -145,7 +147,7 @@ def cross_origin(*args, **kwargs):
             resp = make_response(f(*args, **kwargs))
             _set_cors_headers(resp, options)
 
-            resp._FLASK_CORS_EVALUATED = True  # Mark response as evaluated
+            setattr(resp, FLASK_CORS_EVALUATED, True)
 
             return resp
 
@@ -259,7 +261,7 @@ def _set_cors_headers(resp, options):
     '''
 
     # If CORS has already been evaluated via the decorator, skip
-    if hasattr(resp, '_FLASK_CORS_EVALUATED'):
+    if hasattr(resp, FLASK_CORS_EVALUATED):
         return resp
 
     request_origin = request.headers.get('Origin', None)
Make _FLASK_CORS_EVALUATED a constant for readability
py
diff --git a/ddlgenerator/ddlgenerator.py b/ddlgenerator/ddlgenerator.py
index <HASH>..<HASH> 100755
--- a/ddlgenerator/ddlgenerator.py
+++ b/ddlgenerator/ddlgenerator.py
@@ -210,7 +210,8 @@ class Table(object):
             Table.table_index += 1
         self.table_name = reshape.clean_key_name(self.table_name)
 
-        if not hasattr(self.data, 'append') and not hasattr(self.data, '__next__'):
+        if not hasattr(self.data, 'append') and not hasattr(self.data, '__next__') \
+            and not hasattr(self.data, 'next'):
             self.data = [self.data,]
         self.data = reshape.walk_and_clean(self.data)
do not embed pymongo cursor in list
py
diff --git a/javaproperties/__main__.py b/javaproperties/__main__.py
index <HASH>..<HASH> 100644
--- a/javaproperties/__main__.py
+++ b/javaproperties/__main__.py
@@ -46,7 +46,7 @@ def main():
         setproperty(args.file, args.outfile, {args.key: args.value},
                     args.preserve_timestamp)
     elif args.cmd == 'delete':
-        setproperty(args.file, args.outfile, {k: None for k in args.key},
+        setproperty(args.file, args.outfile, dict.fromkeys(args.key),
                     args.preserve_timestamp)
     else:
         assert False, 'No path defined for command {0!r}'.format(args.cmd)
Python <I> doesn't have dict comprehensions
py
diff --git a/grimoire_elk/elk/enrich.py b/grimoire_elk/elk/enrich.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elk/enrich.py
+++ b/grimoire_elk/elk/enrich.py
@@ -324,7 +324,7 @@ class Enrich(ElasticItems):
         for item in items:
             if current >= max_items:
                 try:
-                    r = self.requests.put(url, data=bulk_json)
+                    r = self.requests.put(url, headers=HEADERS_JSON, data=bulk_json)
                     r.raise_for_status()
                     json_size = sys.getsizeof(bulk_json) / (1024 * 1024)
                     logger.debug("Added %i items to %s (%0.2f MB)", total, url, json_size)
@@ -333,7 +333,7 @@ class Enrich(ElasticItems):
                     logger.error("Unicode error in enriched items")
                     logger.debug(bulk_json)
                     safe_json = str(bulk_json.encode('ascii', 'ignore'), 'ascii')
-                    self.requests.put(url, data=safe_json)
+                    self.requests.put(url, headers=HEADERS_JSON, data=safe_json)
                 bulk_json = ""
                 current = 0
[enrich] Add missing headers in PUT queries to support ES6
py
diff --git a/mavproxy.py b/mavproxy.py
index <HASH>..<HASH> 100755
--- a/mavproxy.py
+++ b/mavproxy.py
@@ -541,7 +541,7 @@ def param_load_file(filename, wildcard):
             continue
         # some parameters should not be loaded from file
         if a[0] in ['SYSID_SW_MREV', 'SYS_NUM_RESETS', 'ARSPD_OFFSET', 'GND_ABS_PRESS',
-                    'GND_TEMP', 'CMD_TOTAL', 'CMD_INDEX', 'LOG_LASTFILE' ]:
+                    'GND_TEMP', 'CMD_TOTAL', 'CMD_INDEX', 'LOG_LASTFILE', 'FENCE_TOTAL' ]:
             continue
         if not fnmatch.fnmatch(a[0].upper(), wildcard.upper()):
             continue
@@ -1324,6 +1324,9 @@ def main_loop():
         except select.error:
             continue
 
+        if mpstate is None:
+            return
+
         for fd in rin:
             for master in mpstate.mav_master:
                 if fd == master.fd:
exclude FENCE_TOTAL from load from file
py
diff --git a/neomodel/util.py b/neomodel/util.py
index <HASH>..<HASH> 100644
--- a/neomodel/util.py
+++ b/neomodel/util.py
@@ -97,7 +97,7 @@ class Database(local):
         u = urlparse(_url)
 
         if u.netloc.find('@') > -1:
-            credentials, self.host = u.netloc.rsplit('@')
+            credentials, self.host = u.netloc.rsplit('@', 1)
             self.user, self.password, = credentials.split(':')
             self.url = ''.join([u.scheme, '://', self.host, u.path, u.query])
             neo4j.authenticate(self.host, self.user, self.password)
rsplit fix maxsplit=1 (as more than one @ is expected)
py
diff --git a/webapp/app/main.py b/webapp/app/main.py
index <HASH>..<HASH> 100644
--- a/webapp/app/main.py
+++ b/webapp/app/main.py
@@ -21,7 +21,7 @@ def _unpack(stream):
         font = TTFont(BytesIO(stream.read(fontlen)))
         yield (desc, font)
 
-def checkfont(desc, font):
+def check_font(desc, font):
     #familyName = desc['familyName']
     #weightName = desc['weightName']
     #isItalic = desc['isItalic']
fix typo (issue #<I>)
py
diff --git a/openquake/commonlib/readinput.py b/openquake/commonlib/readinput.py
index <HASH>..<HASH> 100644
--- a/openquake/commonlib/readinput.py
+++ b/openquake/commonlib/readinput.py
@@ -380,9 +380,9 @@ def get_site_collection(oqparam):
         an :class:`openquake.commonlib.oqvalidation.OqParam` instance
     """
     mesh = get_mesh(oqparam)
+    req_site_params = get_gsim_lt(oqparam).req_site_params
     if oqparam.inputs.get('site_model'):
         sm = get_site_model(oqparam)
-        req_site_params = set(sm.dtype.names) - {'lon', 'lat'}
         try:  # in the future we could have elevation in the site model
             depth = sm['depth']
@@ -420,7 +420,6 @@ def get_site_collection(oqparam):
             # a None sitecol is okay when computing the ruptures only
             return
     else:  # use the default site params
-        req_site_params = get_gsim_lt(oqparam).req_site_params
         sitecol = site.SiteCollection.from_points(
             mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params)
     ss = os.environ.get('OQ_SAMPLE_SITES')
Restored req_site_params as it was [skip CI]
py
diff --git a/tests/classes_test.py b/tests/classes_test.py
index <HASH>..<HASH> 100755
--- a/tests/classes_test.py
+++ b/tests/classes_test.py
@@ -118,7 +118,7 @@ class GSFontTest(unittest.TestCase):
 
         self.assertEqual(len(font.glyphs), 0)
         self.assertEqual(len(font.masters), 0)
-        self.assertEqual(font.masters, [])
+        self.assertEqual(list(font.masters), list(()))
         self.assertEqual(len(font.instances), 0)
         self.assertEqual(font.instances, [])
         self.assertEqual(len(font.customParameters), 0)
Test not accepting masters as tuple (after proxy implementation), ensure both are turned into lists for comparison
py
diff --git a/ella/core/models/publishable.py b/ella/core/models/publishable.py
index <HASH>..<HASH> 100644
--- a/ella/core/models/publishable.py
+++ b/ella/core/models/publishable.py
@@ -54,7 +54,7 @@ class Publishable(models.Model):
 
     # denormalized fields
     # the placement's publish_from
-    publish_from = models.DateTimeField(_('Publish from'), editable=False, default=PUBLISH_FROM_WHEN_EMPTY)
+    publish_from = models.DateTimeField(_('Publish from'), editable=False, default=PUBLISH_FROM_WHEN_EMPTY, db_index=True)
 
     class Meta:
         app_label = 'core'
Index on denormalised publish_from
py
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/client/ssh/__init__.py
+++ b/salt/client/ssh/__init__.py
@@ -479,7 +479,7 @@ class SSH(object):
         cmd_args = arg_str.split(None, 1)
         fun = cmd_args[0]
         args = [cmd_args[1]]
-        
+
         job_load = {
             'jid': jid,
             'tgt_type': self.tgt_type,
Delete trailing whitespace to appeas the pylint gods.
py
diff --git a/claripy/__init__.py b/claripy/__init__.py
index <HASH>..<HASH> 100644
--- a/claripy/__init__.py
+++ b/claripy/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # pylint: disable=F0401,W0401,W0603,
 
-__version__ = "9.2.2.dev0"
+__version__ = "9.2.3.dev0"
 
 if bytes is str:
     raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
Update version to <I>.dev0 [ci skip]
py
diff --git a/thumbor/threadpool.py b/thumbor/threadpool.py
index <HASH>..<HASH> 100644
--- a/thumbor/threadpool.py
+++ b/thumbor/threadpool.py
@@ -8,10 +8,10 @@
 # http://www.opensource.org/licenses/mit-license
 # Copyright (c) 2011 globo.com [email protected]
 
-import asyncio
 from concurrent.futures import ThreadPoolExecutor
 
 from thumbor.utils import logger
+from tornado.ioloop import IOLoop
 
 class ThreadPool:
@@ -44,7 +44,7 @@ class ThreadPool:
         return operation(*args)
 
     async def _execute_in_pool(self, operation, *args):
-        loop = asyncio.get_running_loop()
+        loop = IOLoop.current()
        return await loop.run_in_executor(self.pool, operation, *args)
 
     async def queue(self, operation, *args):
threadpool: Use python <I> supported method to get running loop. Fixes #<I>. Using IOLoop.current instead of asyncio.get_running_loop allows support for py<I> since the latter is not available in py<I>.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -200,8 +200,8 @@ CLASSIFIERS = [
 
 MAJOR = 0
 MINOR = 10
-MICRO = 0
-ISRELEASED = True
+MICRO = 1
+ISRELEASED = False
 VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO)
 QUALIFIER = ''
RLS: bump to <I>dev
py
diff --git a/psy_simple/plotters.py b/psy_simple/plotters.py
index <HASH>..<HASH> 100644
--- a/psy_simple/plotters.py
+++ b/psy_simple/plotters.py
@@ -1610,7 +1610,14 @@ class LineColors(Formatoption):
                     np.linspace(0., 1., len(list(self.iter_data)),
                                 endpoint=True)))
             except (ValueError, TypeError, KeyError):
-                self.color_cycle = cycle(slist(value))
+                try:
+                    # do not use safe_list, because it might be a generator
+                    validate_color(value)
+                except (ValueError, TypeError, AttributeError):
+                    pass
+                else:
+                    value = [value]
+                self.color_cycle = cycle(iter(value))
         if changed:
             self.colors = [
                 next(self.color_cycle) for arr in self.iter_data]
Fixed bug for colors and color_cycles
py
diff --git a/fastly/fastly.py b/fastly/fastly.py
index <HASH>..<HASH> 100755
--- a/fastly/fastly.py
+++ b/fastly/fastly.py
@@ -504,7 +504,7 @@ class FastlyConnection(object):
 
     def list_syslogs(self, service_id, version_number):
         content = self._fetch("/service/%s/version/%d/syslog" % (service_id, version_number))
-        return map(lambda x: FastlyHealthCheck(self, x), content)
+        return map(lambda x: FastlySyslog(self, x), content)
 
     def create_syslog(self, service_id, version_number, name, address=None, ipv4=None, hostname=None, port=514, _format=None):
Fixing a bug in the syslog code.
py
diff --git a/insteonplm/aldb.py b/insteonplm/aldb.py
index <HASH>..<HASH> 100644
--- a/insteonplm/aldb.py
+++ b/insteonplm/aldb.py
@@ -45,10 +45,6 @@ class ALDB(object):
         #     self.log.debug('Ignoring device setitem with no cat: %s', value)
         #     return
 
-        if not isinstance(device, DeviceBase):
-            self.log.info(device)
-            raise ValueError
-
         self._devices[key] = device
 
         self.log.debug('New INSTEON Device %r: %s (%02x:%02x)',
Get rid of check against DeviceBase
py
diff --git a/argresolver/resolver.py b/argresolver/resolver.py
index <HASH>..<HASH> 100644
--- a/argresolver/resolver.py
+++ b/argresolver/resolver.py
@@ -64,8 +64,7 @@ class Resolver(utils.Loggable):
         if sig.defaults is not None:
             defaults = {k: v for k, v in list(zip(sig.args[-len(sig.defaults):], sig.defaults))}
         named_args = list(zip(sig.args, args))
-        unset = (set(sig.args)
-                 - set([name for name, _ in named_args])
+        unset = (set(sig.args) - set([name for name, _ in named_args])
                  .union(set([name for name, _ in kwargs.items()])))
         for u in unset:
             kwargs[u] = Missing
@@ -256,6 +255,6 @@ class EnvironmentResolver(Resolver):
         newval = os.environ.get(lookup, Missing)
         hasdefault = defaults.get(name, Missing) is not Missing
         if newval is Missing and not hasdefault:
-            self.logger.warn("Cannot resolve argument '{}' of {}. Try to set environment variable '{}'".format(
+            self.logger.warning("Cannot resolve argument '{}' of {}. Try to set environment variable '{}'".format(
                 name, cls, lookup))
         return newval
Fixes linting errors for warn -> warning and whitespace after op
py
diff --git a/tests/functional/cli/test_cli.py b/tests/functional/cli/test_cli.py
index <HASH>..<HASH> 100644
--- a/tests/functional/cli/test_cli.py
+++ b/tests/functional/cli/test_cli.py
@@ -3,7 +3,7 @@ import json
 import pytest
 import responses
 
-from gitlab import __version__
+from gitlab import __version__, config
 
 
 @pytest.fixture
@@ -30,9 +30,13 @@ def test_version(script_runner):
 
 
 @pytest.mark.script_launch_mode("inprocess")
-def test_defaults_to_gitlab_com(script_runner, resp_get_project):
-    # Runs in-process to intercept requests to gitlab.com
-    ret = script_runner.run("gitlab", "project", "get", "--id", "1")
+def test_defaults_to_gitlab_com(script_runner, resp_get_project, monkeypatch):
+    with monkeypatch.context() as m:
+        # Ensure we don't pick up any config files that may already exist in the local
+        # environment.
+        m.setattr(config, "_DEFAULT_FILES", [])
+        # Runs in-process to intercept requests to gitlab.com
+        ret = script_runner.run("gitlab", "project", "get", "--id", "1")
     assert ret.success
     assert "id: 1" in ret.stdout
chore: fix functional test failure if config present

Fix functional test failure if config present and configured with token.

Closes: #<I>
py
diff --git a/bpc8583/isoClient.py b/bpc8583/isoClient.py
index <HASH>..<HASH> 100755
--- a/bpc8583/isoClient.py
+++ b/bpc8583/isoClient.py
@@ -165,7 +165,7 @@ if __name__ == '__main__':
     transactions = None
 
     try:
-        optlist, args = getopt.getopt(sys.argv[1:], 'vhp:s:t:m:f:', ['verbose', 'help', 'port=', 'server=', 'terminal=', 'merchant=', 'file='])
+        optlist, args = getopt.getopt(sys.argv[1:], 'hp:s:t:m:f:v', ['help', 'port=', 'server=', 'terminal=', 'merchant=', 'file=', 'verbose'])
         for opt, arg in optlist:
             if opt in ('-v', '--verbose'):
                 verbosity = True
@@ -194,7 +194,7 @@ if __name__ == '__main__':
            sys.exit()
 
     term = Terminal(host=ip, port=port, id=terminal_id, merchant=merchant_id)
-    card = Card() 
+    card = Card()
     if trxn_file:
-        transactions = parse_transactions_file(arg, term, card)
+        transactions = parse_transactions_file(trxn_file, term, card)
     main(term, card, transactions, verbosity)
Fixed bug with xml file command line option
py
diff --git a/src/toil/batchSystems/torque.py b/src/toil/batchSystems/torque.py
index <HASH>..<HASH> 100644
--- a/src/toil/batchSystems/torque.py
+++ b/src/toil/batchSystems/torque.py
@@ -50,7 +50,8 @@ class TorqueBatchSystem(AbstractGridEngineBatchSystem):
             # Limit qstat to current username to avoid clogging the batch system on heavily loaded clusters
             #job_user = os.environ.get('USER')
            #process = subprocess.Popen(['qstat', '-u', job_user], stdout=subprocess.PIPE)
-            process = subprocess.Popen(['qstat'], stdout=subprocess.PIPE)
+            # -x shows exit status in PBSPro
+            process = subprocess.Popen(['qstat', '-x'], stdout=subprocess.PIPE)
 
             stdout, stderr = process.communicate()
             # qstat supports XML output which is more comprehensive, but PBSPro does not support it
@@ -113,7 +114,7 @@ class TorqueBatchSystem(AbstractGridEngineBatchSystem):
                 line = line.strip()
                 if line.startswith("failed") and int(line.split()[1]) == 1:
                     return 1
-                if line.startswith("exit_status"):
+                if line.startswith("exit_status") or line.startswith("EXIT_STATUS"):
                     status = line.split(' = ')[1]
                     logger.debug('Exit Status: ' + status)
                     return int(status)
Adding PBSPro qstat particularities pointed out by @hcraT, thanks
py
diff --git a/stripe/version.py b/stripe/version.py
index <HASH>..<HASH> 100644
--- a/stripe/version.py
+++ b/stripe/version.py
@@ -1 +1 @@
-VERSION = '1.27.1'
+VERSION = '1.27.0'
Revert version in version.py as well
py
diff --git a/pymc/StepMethods.py b/pymc/StepMethods.py
index <HASH>..<HASH> 100644
--- a/pymc/StepMethods.py
+++ b/pymc/StepMethods.py
@@ -800,7 +800,9 @@ class AdaptiveMetropolis(StepMethod):
         """
         if not stochastic.dtype in float_dtypes and not stochastic.dtype in integer_dtypes:
             return 0
-        if np.iterable(stochastic.value):
+        # Algorithm is not well-suited to sparse datasets. Dont use if less than
+        # 25 percent of values are nonzero
+        if np.iterable(stochastic.value) and (len(stochastic.value.nonzero()[0]) < 0.25*len(stochastic.value)):
             return 2
         else:
             return 0
Prevented selection of AdaptiveMetropolis for stochastics that are sparse (result in singular matrices)

git-svn-id: <URL>
py
diff --git a/bcbio/pipeline/variation.py b/bcbio/pipeline/variation.py
index <HASH>..<HASH> 100644
--- a/bcbio/pipeline/variation.py
+++ b/bcbio/pipeline/variation.py
@@ -29,7 +29,7 @@ def _analyze_recalibration(recal_file, fastq1, fastq2, dirs, config):
     if fastq2:
         cl.append(fastq2)
     cl.append("--workdir=%s" % dirs["work"])
-    cl.append("--input-format=%s" % qual_opts[qual_format])
+    cl.append("--input_format=%s" % qual_opts[qual_format])
     subprocess.check_call(cl)
 
 def _get_dbsnp_file(config, sam_ref):
Handle standard quality format when assessing re-calibration
py
diff --git a/cobald/utility/concurrent/meta_runner.py b/cobald/utility/concurrent/meta_runner.py
index <HASH>..<HASH> 100644
--- a/cobald/utility/concurrent/meta_runner.py
+++ b/cobald/utility/concurrent/meta_runner.py
@@ -22,6 +22,7 @@ class MetaRunner(object):
         self.runners = {
             runner.flavour: runner() for runner in (TrioRunner, AsyncioRunner, ThreadRunner)
         }
+        self._lock = threading.Lock()
        self.running = threading.Event()
         self.running.clear()
 
@@ -35,7 +36,9 @@ class MetaRunner(object):
         """Run all runners until completion"""
         self._logger.info('starting all runners')
         try:
-            self.running.set()
+            with self._lock:
+                assert not self.running.set(), 'cannot re-run: %s' % self
+                self.running.set()
             thread_runner = self.runners[threading]
             for runner in self.runners.values():
                 if runner is not thread_runner:
meta runner checks running state before running
py
diff --git a/epdb/epdb.py b/epdb/epdb.py
index <HASH>..<HASH> 100644
--- a/epdb/epdb.py
+++ b/epdb/epdb.py
@@ -34,7 +34,6 @@ from pdb import _saferepr
 
 class Epdb(pdb.Pdb):
     _historyPath = os.path.expanduser('~/.epdbhistory')
-    prompt = '(Epdb) '
     multiline_prompt = '| '
     fail_silently_on_ioerror = False # if set to True, ignore calls to epdb
                                      # when there is no usable device
@@ -58,6 +57,7 @@ class Epdb(pdb.Pdb):
         self._completer = erlcompleter.ECompleter()
 
         self._oldHistory = []
+        self.prompt = '(Epdb) '
 
     def store_old_history(self):
         historyLen = readline.get_current_history_length()
@@ -299,7 +299,7 @@ class Epdb(pdb.Pdb):
             return self.multiline(origLine)
         try:
             self.save_history()
-            return pdb.Pdb.default(self, line)
+            return pdb.Pdb.default(self, origLine)
         finally:
             self.read_history()
fixes: prompt cannot be set until after Pdb.__init__ is called, call Pdb.default with origLine
py
diff --git a/km3pipe/__version__.py b/km3pipe/__version__.py
index <HASH>..<HASH> 100644
--- a/km3pipe/__version__.py
+++ b/km3pipe/__version__.py
@@ -9,7 +9,7 @@ Pep 386 compliant version info.
     (1, 2, 0, 'beta', 2) => "1.2b2"
 
 """
-version_info = (0, 6, 6, 'final', 0)
+version_info = (0, 6, 7, 'final', 0)
 
 def _get_version(version_info):
     """Return a PEP 386-compliant version number."""
Changes version to <I>
py
diff --git a/hpOneView/servers.py b/hpOneView/servers.py
index <HASH>..<HASH> 100644
--- a/hpOneView/servers.py
+++ b/hpOneView/servers.py
@@ -162,13 +162,10 @@ class servers(object):
     def add_enclosure(self, enclosure, blocking=True, verbose=False):
         task, body = self._con.post(uri['enclosures'], enclosure)
         if blocking is True:
-            try:
-                if enclosure['firmwareBaselineUri'] is None:
-                    task = self._activity.wait4task(task, tout=600, verbose=verbose)
-                else:
-                    task = self._activity.wait4task(task, tout=3600, verbose=verbose)
-            except:
-                task = self._activity.wait4task(task, tout=600, verbose=verbose)
+            if enclosure['firmwareBaselineUri'] is None:
+                task = self._activity.wait4task(task, tout=600, verbose=verbose)
+            else:
+                task = self._activity.wait4task(task, tout=3600, verbose=verbose)
         entity = self._activity.get_task_associated_resource(task)
         enclosure = self._con.get(entity['resourceUri'])
         return enclosure
Remove the case of raising an exception when another exception could be occurring
py
diff --git a/tests/test_genanki.py b/tests/test_genanki.py
index <HASH>..<HASH> 100644
--- a/tests/test_genanki.py
+++ b/tests/test_genanki.py
@@ -99,6 +99,24 @@ class TestWithCollection:
 
     assert imported_deck['name'] == 'foodeck'
 
+  def test_generated_deck_has_valid_cards(self):
+    """
+    Generates a deck with several notes and verifies that the nid/ord combinations on the generated cards make sense.
+
+    Catches a bug that was fixed in 08d8a139.
+    """
+    deck = genanki.Deck(123456, 'foodeck')
+    deck.add_note(genanki.Note(TEST_CN_MODEL, ['a', 'b', 'c']))  # 2 cards
+    deck.add_note(genanki.Note(TEST_CN_MODEL, ['d', 'e', 'f']))  # 2 cards
+    deck.add_note(genanki.Note(TEST_CN_MODEL, ['g', 'h', 'i']))  # 2 cards
+
+    self.import_package(genanki.Package(deck))
+
+    cards = [self.col.getCard(i) for i in self.col.findCards('')]
+
+    # the bug causes us to fail to generate certain cards (e.g. the second card for the second note)
+    assert len(cards) == 6
+
   def test_card_isEmpty__with_2_fields__succeeds(self):
     """Tests for a bug in an early version of genanki where notes with <4 fields were not supported."""
     deck = genanki.Deck(123456, 'foodeck')
Add test for bug introduced in <I>d8a<I>
py
diff --git a/coursera/coursera_dl.py b/coursera/coursera_dl.py
index <HASH>..<HASH> 100755
--- a/coursera/coursera_dl.py
+++ b/coursera/coursera_dl.py
@@ -45,13 +45,14 @@ import errno
 import logging
 import os
 import re
-import requests
 import string
 import StringIO
 import subprocess
 import sys
 import time
 
+import requests
+
 try:
     from BeautifulSoup import BeautifulSoup
 except ImportError:
coursera: Move third-party imports after the stdlib ones.
py
diff --git a/backtrader/feeds/yahoo.py b/backtrader/feeds/yahoo.py
index <HASH>..<HASH> 100644
--- a/backtrader/feeds/yahoo.py
+++ b/backtrader/feeds/yahoo.py
@@ -297,6 +297,8 @@ class YahooFinanceData(YahooFinanceCSVData):
                 self.f = None
                 return
 
+        crumb = urlquote(crumb)
+
         # urldown/ticker?period1=posix1&period2=posix2&interval=1d&events=history&crumb=crumb
 
         # Try to download
Fix: crumb in feeds.YahooFinanceData (#<I>)

- authorization cookie crumb may contain '/' as character
py
diff --git a/phonenumber_field/phonenumber.py b/phonenumber_field/phonenumber.py
index <HASH>..<HASH> 100644
--- a/phonenumber_field/phonenumber.py
+++ b/phonenumber_field/phonenumber.py
@@ -70,8 +70,7 @@ def to_python(value):
         phone_number = PhoneNumber(raw_input=value)
     elif isinstance(value, phonenumbers.phonenumber.PhoneNumber) and \
             not isinstance(value, PhoneNumber):
-        phone_number = self.field.attr_class()
-        phone_number.merge_from(value)
+        phone_number = Phonenumber(value)
     elif isinstance(value, PhoneNumber):
         phone_number = value
     return phone_number
\ No newline at end of file
Update phonenumber_field/phonenumber.py Fixed a bug which appeared when you tried to create a new model instance with phonenumber information.
py
diff --git a/conway.py b/conway.py
index <HASH>..<HASH> 100755
--- a/conway.py
+++ b/conway.py
@@ -36,24 +36,21 @@ def main():
     print term.civis,  # hide cursor
     print term.clear,
 
-    while True:
+    for frame_end in seconds_from_now(0.05):
         try:
-            target_time = time() + 0.05
-
             board = next_board(board, die)
             draw(board, term, cells)
 
             # If the pattern is stuck in a loop, give it a nudge:
             if detector.is_bored_of(board):
-                board.update(
-                    random_board(width - 1, height - 1, NUDGING_LOAD_FACTOR))
+                board.update(random_board(width - 1,
+                                          height - 1,
+                                          NUDGING_LOAD_FACTOR))
 
             stdout.flush()
 
             # Cap FPS:
-            now = time()
-            if now < target_time:
-                sleep(target_time - now)
+            sleep_until(frame_end)
         except KeyboardInterrupt:
             break
         finally:
@@ -61,6 +58,18 @@ def main():
         print term.cnorm
 
 
+def sleep_until(target_time):
+    now = time()
+    if now < target_time:
+        sleep(target_time - now)
+
+
+def seconds_from_now(seconds):
+    """Infinitely yield timestamps a given number of seconds from now."""
+    while True:
+        yield time() + seconds
+
+
 def cell_strings(term):
     """Return the strings that represent each possible living cell state.
Make main() comprise a more tolerable percentage of timing code.
py
diff --git a/trie/binary.py b/trie/binary.py
index <HASH>..<HASH> 100644
--- a/trie/binary.py
+++ b/trie/binary.py
@@ -64,6 +64,8 @@ class BinaryTrie(object):
         nodetype, left_child, right_child = parse_node(self.db[node_hash])
         # Key-value node descend
         if nodetype == LEAF_TYPE:
+            if keypath:
+                return None
             return right_child
         elif nodetype == KV_TYPE:
             # Keypath too short
fix: make a key whose prefix is the same as one of the existing keys invalid
py
diff --git a/appnexus/model.py b/appnexus/model.py
index <HASH>..<HASH> 100644
--- a/appnexus/model.py
+++ b/appnexus/model.py
@@ -84,12 +84,12 @@ class Campaign(Model):
         return Profile.find_one(id=self.profile_id)
 
 
-def gen_services(services_list):
+def create_models(services_list):
     for service in services_list:
         model = type(service, (Model,), {})
         globals().setdefault(service, model)
 
-gen_services(services_list)
+create_models(services_list)
 
 __all__ = ["Model", "services_list"] + services_list
This is not a generator, and it's not outputting services but models
py
diff --git a/imgaug/imgaug.py b/imgaug/imgaug.py
index <HASH>..<HASH> 100644
--- a/imgaug/imgaug.py
+++ b/imgaug/imgaug.py
@@ -892,13 +892,6 @@ def draw_text(img, y, x, text, color=(0, 255, 0), size=25):
     if img.dtype == np.float32:
         img = img.astype(np.uint8)
 
-    for i in range(len(color)):
-        val = color[i]
-        if isinstance(val, float):
-            val = int(val * 255)
-        val = np.clip(val, 0, 255)
-        color[i] = val
-
     img = PIL_Image.fromarray(img)
     font = PIL_ImageFont.truetype(DEFAULT_FONT_FP, size)
     context = PIL_ImageDraw.Draw(img)
Remove dubious color normalization from draw_text()
py
diff --git a/test/test.py b/test/test.py
index <HASH>..<HASH> 100644
--- a/test/test.py
+++ b/test/test.py
@@ -27,7 +27,7 @@ for entry in os.listdir(sys.argv[1]):
     # Run the test.
     test_fname = gold_fname[:-len(suffix):] + '.py'
     command = '%s %s'%(sys.executable, test_fname)
-    #print(command)
+    print(command)
     p = Popen(command, shell=True, stdout=PIPE, stderr=STDOUT)
     test_lines = p.stdout.readlines()
Echo command to stdout.
py
diff --git a/splinter/driver/__init__.py b/splinter/driver/__init__.py
index <HASH>..<HASH> 100644
--- a/splinter/driver/__init__.py
+++ b/splinter/driver/__init__.py
@@ -17,9 +17,6 @@ class DriverAPI(object):
     def reload(self):
         raise NotImplementedError
 
-    def switch_to_frame(self, id):
-        raise NotImplementedError
-
     def get_iframe(self, id):
         raise NotImplementedError
switch_to_frame method removed, use the new get_iframe instead.
py
diff --git a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py
index <HASH>..<HASH> 100644
--- a/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py
+++ b/packages/aws-cdk/lib/init-templates/sample-app/python/%name.PythonModule%/%name.PythonModule%_stack.template.py
@@ -7,9 +7,9 @@ from aws_cdk import (
 )
 
 
-class %name.PascalCased%Stack(core.Stack):
+class % name.PascalCased % Stack(core.Stack):
 
-    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
+    def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
         super().__init__(scope, id, **kwargs)
 
         queue = sqs.Queue(
Change id to construct_id in Python sample-app
py
diff --git a/path.py b/path.py index <HASH>..<HASH> 100644 --- a/path.py +++ b/path.py @@ -87,7 +87,7 @@ if PY2: codecs.register_error('surrogateescape', surrogate_escape) ############################################################################## -__version__ = '6.0' +__version__ = '6.1' __all__ = ['path', 'CaseInsensitivePattern']
Bumped to <I> in preparation for next release.
py
diff --git a/flask_resty/view.py b/flask_resty/view.py index <HASH>..<HASH> 100644 --- a/flask_resty/view.py +++ b/flask_resty/view.py @@ -348,12 +348,14 @@ class ModelView(ApiView): # Flushing allows checking invariants without committing. self.session.flush() except IntegrityError: + flask.current_app.logger.exception("flush failed") raise ApiError(409, {'code': 'invalid_data.conflict'}) def commit(self): try: self.session.commit() except IntegrityError: + flask.current_app.logger.exception("commit failed") raise ApiError(409, {'code': 'invalid_data.conflict'}) def set_item_meta(self, item):
Log database integrity errors (#<I>)
py
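As a framework-agnostic illustration of the change above, a small sketch of converting a low-level database error into an API-level one while still recording the traceback. ApiError here is a simplified stand-in for the library's own exception class, and the session argument is whatever ORM session the caller provides:

import logging

logger = logging.getLogger(__name__)

class ApiError(Exception):
    def __init__(self, status, payload):
        super().__init__(status, payload)
        self.status = status
        self.payload = payload

def commit(session):
    try:
        session.commit()
    except Exception:                      # e.g. an IntegrityError from the ORM
        logger.exception('commit failed')  # logs the message plus the full traceback
        raise ApiError(409, {'code': 'invalid_data.conflict'})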
diff --git a/treeherder/model/management/commands/run_sql.py b/treeherder/model/management/commands/run_sql.py index <HASH>..<HASH> 100644 --- a/treeherder/model/management/commands/run_sql.py +++ b/treeherder/model/management/commands/run_sql.py @@ -78,14 +78,15 @@ class Command(BaseCommand): for datasource in datasources: self.stdout.write("--------------------------") - db = MySQLdb.connect( + conn = MySQLdb.connect( host=datasource.host, db=datasource.name, user=settings.TREEHERDER_DATABASE_USER, passwd=settings.TREEHERDER_DATABASE_PASSWORD) try: - cursor = db.cursor() + cursor = conn.cursor() cursor.execute(sql_code) + conn.commit() self.stdout.write("Sql code executed on {}:".format(datasource)) for row in cursor: self.stdout.write(" {}".format(row)) @@ -96,3 +97,4 @@ class Command(BaseCommand): finally: if cursor: cursor.close() + conn.close()
Bug <I> - run_sql: Commit the transaction after executing it. Otherwise any changes are discarded. The connection is also now closed properly after we're done with it.
py
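For readers unfamiliar with DB-API drivers: MySQLdb connections do not autocommit by default, so statements that modify data are discarded unless the transaction is committed. A hedged, standalone sketch of the commit-and-cleanup pattern this fix applies (the connection parameters are placeholders, not real settings):

import MySQLdb

def run_sql(sql_code):
    conn = MySQLdb.connect(host='localhost', db='example',
                           user='user', passwd='secret')
    cursor = None
    try:
        cursor = conn.cursor()
        cursor.execute(sql_code)
        conn.commit()            # persist the changes; without this they are rolled back
        return cursor.fetchall()
    finally:
        if cursor:
            cursor.close()
        conn.close()             # always release the connection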
diff --git a/agon/models.py b/agon/models.py index <HASH>..<HASH> 100644 --- a/agon/models.py +++ b/agon/models.py @@ -44,6 +44,12 @@ class TargetStat(models.Model): class Meta: unique_together = [("target_content_type", "target_object_id")] + + @classmethod + def update_points(cls, given, lookup_params): + return cls._default_manager.filter(**lookup_params).update( + points = models.F("points") + given, + ) def award_points(target, key): @@ -66,10 +72,7 @@ def award_points(target, key): points_given = lookup_point_value(key) - updated = TargetStat.objects.filter(**lookup_params).update( - points = models.F("points") + points_given, - ) - if not updated: + if not TargetStat.update_points(points_given, lookup_params): try: sid = transaction.savepoint() TargetStat._default_manager.create( @@ -78,6 +81,7 @@ def award_points(target, key): transaction.savepoint_commit(sid) except IntegrityError, e: transaction.savepoint_rollback(sid) + TargetStat.update_points(points_given, lookup_params) points_awarded.send(sender=target.__class__, target=target, key=key)
better abstraction and ensure update_points is called when IntegrityError is raised
py
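The race the final hunk guards against is easier to see outside the ORM. Below is a hedged sketch of the same update-then-insert-then-update pattern using the standard-library sqlite3 module; the table and column names are invented, and target_id is assumed to carry a UNIQUE constraint so that concurrent inserts raise IntegrityError:

import sqlite3

def award_points(conn, target_id, given):
    cur = conn.execute(
        'UPDATE target_stat SET points = points + ? WHERE target_id = ?',
        (given, target_id))
    if cur.rowcount == 0:
        try:
            conn.execute(
                'INSERT INTO target_stat (target_id, points) VALUES (?, ?)',
                (target_id, given))
        except sqlite3.IntegrityError:
            # Another writer created the row between our UPDATE and INSERT;
            # fall back to updating the row it created.
            conn.execute(
                'UPDATE target_stat SET points = points + ? WHERE target_id = ?',
                (given, target_id))
    conn.commit()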
diff --git a/linguist/mixins.py b/linguist/mixins.py index <HASH>..<HASH> 100644 --- a/linguist/mixins.py +++ b/linguist/mixins.py @@ -108,11 +108,10 @@ class ModelMeta(models.base.ModelBase): def __new__(cls, name, bases, attrs): - from .fields import TranslationField, CacheDescriptor + from .fields import CacheDescriptor meta = None default_language = utils.get_fallback_language() - default_language_field = None if 'Meta' in attrs and hasattr(attrs['Meta'], 'linguist'): validate_meta(attrs['Meta'].linguist) @@ -139,10 +138,10 @@ class ModelMeta(models.base.ModelBase): for field in meta['fields']: - if not field in all_fields: + if field not in all_fields: raise ImproperlyConfigured( - "There is no field %(field)s in model %(name)s, "\ - "as specified in Meta's translate attribute" % \ + "There is no field %(field)s in model %(name)s, " + "as specified in Meta's translate attribute" % dict(field=field, name=name)) original_fields[field] = all_fields[field]
pep8 fixes in mixins.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ with open(path.join(here, 'README.rst'), encoding='utf-8') as f: setup( name='encviewfuse.commons', - version='0.2.2', + version='0.2.3', description='Common encryption utilities for the implementation of simple encryption/decryption views.', long_description=long_description, url='https://github.com/seiferma/encviewfuse.commons',
Updated version and travis config.
py
diff --git a/basis/models.py b/basis/models.py index <HASH>..<HASH> 100644 --- a/basis/models.py +++ b/basis/models.py @@ -57,6 +57,10 @@ class BasisModel(TimeStampModel, PersistentModel): abstract = True def save(self, *args, **kwargs): + try: + kwargs['current_user'] = self.current_user + except AttributeError: + pass self.__set_user(kwargs) super(BasisModel, self).save(*args, **kwargs)
Added handling for update. In the case of model creation, the `current_user` is given through kwargs, so the current logic works fine. However, in the `update` case, `rest_framework` uses `setattr`, so `current_user` is only accessible through `self.current_user`.
py
diff --git a/tests/tests.py b/tests/tests.py index <HASH>..<HASH> 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -986,27 +986,6 @@ class RegressionSuite(TestCase): @unittest.skipUnless(LIVE_TEST, "requires HOST variable to be set") -class TestLoadBalancer(DbTestCase): - def test_second(self): - server = settings.CONNECT_KWARGS['server'] - if '\\' in server: - server, _ = server.split('\\') - lb = SimpleLoadBalancer(['badserver', server]) - with connect(load_balancer=lb, *settings.CONNECT_ARGS, **settings.CONNECT_KWARGS) as conn: - with conn.cursor() as cur: - cur.execute('select 1') - cur.fetchall() - - def test_none(self): - lb = SimpleLoadBalancer(['badserver']) - with self.assertRaises(LoginError): - with connect(load_balancer=lb, *settings.CONNECT_ARGS, **settings.CONNECT_KWARGS) as conn: - with conn.cursor() as cur: - cur.execute('select 1') - cur.fetchall() - - [email protected](LIVE_TEST, "requires HOST variable to be set") class TestIntegrityError(DbTestCase): def test_primary_key(self): cursor = self.conn.cursor()
Removed TestLoadBalancer. LoadBalancer is now deprecated.
py
diff --git a/mot/model_building/model_builders.py b/mot/model_building/model_builders.py index <HASH>..<HASH> 100644 --- a/mot/model_building/model_builders.py +++ b/mot/model_building/model_builders.py @@ -1,8 +1,6 @@ import numpy as np import copy - from six import string_types - from mot.cl_data_type import SimpleCLDataType from mot.cl_routines.mapping.calc_dependent_params import CalculateDependentParameters from mot.cl_routines.mapping.codec_runner import CodecRunner @@ -143,7 +141,8 @@ class OptimizeModelBuilder(OptimizeModelInterface): Returns: Returns self for chainability """ - self._model_functions_info.set_parameter_value(model_param_name, value) + if not self._model_functions_info.is_fixed(model_param_name): + self._model_functions_info.set_parameter_value(model_param_name, value) return self def set_initial_parameters(self, initial_params):
Adds a check for fixed parameters in the init value method. This is to prevent initialization from overwriting fixations.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -31,6 +31,7 @@ setup( 'License :: OSI Approved :: BSD License', 'Programming Language :: C', 'Programming Language :: Python', + 'Programming Language :: Python :: 3', 'Topic :: Scientific/Engineering', 'Operating System :: OS Independent', ],
oops, add Python 3 to setup.py for PyPI category inclusion.
py
diff --git a/circlator/tasks/all.py b/circlator/tasks/all.py index <HASH>..<HASH> 100644 --- a/circlator/tasks/all.py +++ b/circlator/tasks/all.py @@ -140,6 +140,7 @@ def run(): min_read_length=options.b2r_min_read_length, contigs_to_use=options.b2r_only_contigs, discard_unmapped=options.b2r_discard_unmapped, + verbose=options.verbose, ) bam_filter.run()
Pass verbose option to bam2reads
py
diff --git a/libgreader/__init__.py b/libgreader/__init__.py index <HASH>..<HASH> 100644 --- a/libgreader/__init__.py +++ b/libgreader/__init__.py @@ -1,21 +1,12 @@ # -*- coding: utf-8 -*- -""" -libG(oogle)Reader -Copyright (C) 2010 Matt Behrens <[email protected]> http://asktherelic.com - -Python library for working with the unofficial Google Reader API. -Google may break this at anytime, I am not responsible for damages from that -breakage, but I will try my best to fix it. - -Uses HTTPS for all requests to and from Google. - -Licensing included in LICENSE.txt -""" +# libgreader +# Copyright (C) 2012 Matt Behrens <[email protected]> +# Python library for the Google Reader API __author__ = "Matt Behrens <[email protected]>" -__version__ = "0.6.0beta1" -__credits__ = "Matt Behrens <[email protected]>, Stephane Angel aka Twidi <[email protected]>" +__version__ = "0.6.0" +__copyright__ = "Copyright (C) 2012 Matt Behrens" from googlereader import GoogleReader from auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
simplify. <I> is what version I'm working toward
py
diff --git a/centinel/backend.py b/centinel/backend.py index <HASH>..<HASH> 100644 --- a/centinel/backend.py +++ b/centinel/backend.py @@ -248,17 +248,18 @@ def sync(config): logging.error("Unable to create user: %s" % str(exp)) return - # send all results (.tar.bz2 + .json + .pcap.bz2) + # send all results (.json + .bz2) result_files = (glob.glob(os.path.join(config['dirs']['results_dir'], - '[!_]*.tar.bz2')) + + '[!_]*.bz2')) + glob.glob(os.path.join(config['dirs']['results_dir'], '[!_]*.json'))) # only upload pcaps if it is allowed - if config['results']['upload_pcaps']: - result_files = (result_files + - glob.glob(os.path.join(config['dirs']['results_dir'], - '[!_]*.pcap.bz2'))) + if config['results']['upload_pcaps'] is False: + for pcap_file in glob.glob(os.path.join(config['dirs']['results_dir'], + '[!_]*.pcap.bz2')): + if pcap_file in result_files: + result_files.remove(pcap_file) for path in result_files: try:
results upload mechanism now supports compressed external files
py
diff --git a/allennlp/common/tee_logger.py b/allennlp/common/tee_logger.py index <HASH>..<HASH> 100644 --- a/allennlp/common/tee_logger.py +++ b/allennlp/common/tee_logger.py @@ -23,11 +23,11 @@ class TeeLogger: def write(self, message): self.terminal.write(message) # We'll special case a particular thing that keras does, to make the log file more - # readable. Keras uses ^H characters to get the training line to update for each batch + # readable. TQDM uses carriage returns to get the training line to update for each batch # without adding more lines to the terminal output. Displaying those in a file won't work # correctly, so we'll just make sure that each batch shows up on its one line. - if '\x08' in message: - message = message.replace('\x08', '') + if '\r' in message: + message = message.replace('\r', '') if not message or message[-1] != '\n': message += '\n' self.log.write(message)
Change backspace to carriage return in TeeLogger. (#<I>)
py
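A minimal, runnable sketch of the tee-writer behaviour this commit corrects (the class and file names are invented for the example): progress output goes to the terminal unchanged, while the carriage returns TQDM uses to redraw its progress line are stripped before the message is appended to the log file.

import sys

class MiniTee:
    def __init__(self, log_path):
        self.terminal = sys.stdout
        self.log = open(log_path, 'a')

    def write(self, message):
        self.terminal.write(message)
        message = message.replace('\r', '')   # one log line per progress update
        if not message or message[-1] != '\n':
            message += '\n'
        self.log.write(message)

    def flush(self):
        self.terminal.flush()
        self.log.flush()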
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,6 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + import os.path import sys @@ -7,6 +10,11 @@ full_version = '' root_dir = os.path.abspath(os.path.dirname(__file__)) +py_version = sys.version_info[:2] + +if py_version < (3, 4): + raise Exception("python-mattermost-driver requires Python >= 3.4.") + readme_file = os.path.join(root_dir, 'README.rst') with open(readme_file, encoding='utf-8') as f: long_description = f.read() @@ -15,10 +23,6 @@ version_module = os.path.join(root_dir, 'src', 'mattermostdriver', 'version.py') with open(version_module, encoding='utf-8') as f: exec(f.read()) -py_version = sys.version_info[:2] - -if py_version < (3, 4): - raise Exception("python-mattermost-driver requires Python >= 3.4.") setup( name='mattermostdriver',
made setup.py not fail with an error on Python 2
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ sqlalchemy_requires = ["sqlalchemy<=1.3.99999,>=0.9", "sqlalchemy-utils<=0.36.99 axonserver_requires = ["axonclient<=0.0.99999"] -ray_requires = ["ray<=0.8.99999", "psutil", "setproctitle"] +ray_requires = ["ray<=0.8.5", "psutil", "setproctitle"] thespian_requires = ["thespian<=3.10.99999"]
Adjusted version of dependency on Ray (stalling on Travis).
py
diff --git a/a10_neutron_lbaas/tests/unit/v2/test_handler_member.py b/a10_neutron_lbaas/tests/unit/v2/test_handler_member.py index <HASH>..<HASH> 100644 --- a/a10_neutron_lbaas/tests/unit/v2/test_handler_member.py +++ b/a10_neutron_lbaas/tests/unit/v2/test_handler_member.py @@ -76,10 +76,10 @@ class TestMembers(test_base.HandlerTestBase): server_args = {} if conn_limit is not None: if conn_limit > 0 and conn_limit <= 8000000: - server_args['conn_limit'] = conn_limit + server_args['conn-limit'] = conn_limit if conn_resume is not None: if conn_resume > 0 and conn_resume <= 1000000: - server_args['conn_resume'] = conn_resume + server_args['conn-resume'] = conn_resume self.a.last_client.slb.server.create.assert_called_with( name, ip, status=status,
correction in unit tests for conn-resume and conn-limit
py
diff --git a/ethereum/__init__.py b/ethereum/__init__.py index <HASH>..<HASH> 100644 --- a/ethereum/__init__.py +++ b/ethereum/__init__.py @@ -17,11 +17,11 @@ GIT_DESCRIBE_RE = re.compile( __version__ = None try: - _dist = get_distribution('pyethapp') + _dist = get_distribution('ethereum') # Normalize case for Windows systems dist_loc = os.path.normcase(_dist.location) here = os.path.normcase(__file__) - if not here.startswith(os.path.join(dist_loc, 'pyethapp')): + if not here.startswith(os.path.join(dist_loc, 'ethereum')): # not installed, but there is another version that *is* raise DistributionNotFound __version__ = _dist.version
must check the right distribution name, not pyethapp; see issue #<I>
py
diff --git a/cms_lab_publications/admin.py b/cms_lab_publications/admin.py index <HASH>..<HASH> 100644 --- a/cms_lab_publications/admin.py +++ b/cms_lab_publications/admin.py @@ -68,8 +68,8 @@ class PublicationAdmin(admin.ModelAdmin): fieldsets = [ fieldset_pubmed_query, - fieldset_files, fieldset_pubmed_metadata, + fieldset_files, ] inlines = [
Place metadata fieldset immediately after query fieldset
py