Columns: diff (string, lengths 139 to 3.65k) · message (string, lengths 8 to 627) · diff_languages (stringclasses, 1 value: py)
diff --git a/tests/eng/test_external.py b/tests/eng/test_external.py index <HASH>..<HASH> 100644 --- a/tests/eng/test_external.py +++ b/tests/eng/test_external.py @@ -20,7 +20,8 @@ def test_feature_engine(): except ImportError: pass else: - from ballet.eng.feature_engine import YeoJohnsonTransformer # noqa f401 + from ballet.eng.feature_engine import ( # noqa f401 + YeoJohnsonTransformer,) def test_featuretools():
Rewrap import; isort has not released a patch to <I> yet for #<I>
py
diff --git a/analyzers/TalosReputation/TalosReputation.py b/analyzers/TalosReputation/TalosReputation.py index <HASH>..<HASH> 100755 --- a/analyzers/TalosReputation/TalosReputation.py +++ b/analyzers/TalosReputation/TalosReputation.py @@ -30,12 +30,13 @@ class TalosReputation(Analyzer): if self.data_type == 'ip': try: data = self.get_data() - + headers={ 'Host':'talosintelligence.com', 'Referer':'https://talosintelligence.com/reputation_center/lookup?search={}'.format(data), - 'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:52.0) Gecko/20100101 Firefox/52.0', - 'Accept':'*/*' + 'User-Agent':'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.14; rv:69.0) Gecko/20100101 Firefox/69.0', + 'Accept':'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', + 'Accept-Encoding': 'gzip, deflate' } response_details = requests.get('https://talosintelligence.com/sb_api/query_lookup',
changed User-Agent and Accept/Accept-Encoding headers to make the Talos reverse proxy happy
py
diff --git a/torext/handlers/base.py b/torext/handlers/base.py index <HASH>..<HASH> 100644 --- a/torext/handlers/base.py +++ b/torext/handlers/base.py @@ -181,9 +181,9 @@ class BaseHandler(tornado.web.RequestHandler): chunk could be any type of (str, dict, list) """ assert chunk is not None, 'None cound not be written in write_json' + self.set_header("Content-Type", "application/json; charset=UTF-8") if isinstance(chunk, dict) or isinstance(chunk, list): chunk = self.json_encode(chunk) - self.set_header("Content-Type", "application/json; charset=UTF-8") # convert chunk to utf8 before `RequestHandler.write()` # so that if any error occurs, we can catch and log it
fix JSON Content-Type header not being set in write_json when chunk is a str
py
diff --git a/paramiko/transport.py b/paramiko/transport.py index <HASH>..<HASH> 100644 --- a/paramiko/transport.py +++ b/paramiko/transport.py @@ -966,6 +966,8 @@ class Transport (threading.Thread): supplied, this method returns ``None``. :returns: server supplied banner (`str`), or ``None``. + + .. versionadded:: 1.13 """ if not self.active or (self.auth_handler is None): return None
Add a missing versionadded for get_banner
py
diff --git a/workbench_cli/workbench_cli/workbench_shell.py b/workbench_cli/workbench_cli/workbench_shell.py index <HASH>..<HASH> 100644 --- a/workbench_cli/workbench_cli/workbench_shell.py +++ b/workbench_cli/workbench_cli/workbench_shell.py @@ -170,9 +170,7 @@ class WorkbenchShell(object): def chunks(data, chunk_size): """ Yield chunk_size chunks from data.""" for i in xrange(0, len(data), chunk_size): - compress = lz4.dumps(data[i:i+chunk_size]) - print 'compression: %d' % (len(compress)*100/chunk_size) - yield compress + yield lz4.dumps(data[i:i+chunk_size]) def file_chunker(self, raw_bytes, filename, type_tag): """Split up a large file into chunks and send to Workbench"""
just some cleanup around lz4 compression
py
diff --git a/pyad2usb/devices.py b/pyad2usb/devices.py index <HASH>..<HASH> 100644 --- a/pyad2usb/devices.py +++ b/pyad2usb/devices.py @@ -428,6 +428,8 @@ class SerialDevice(Device): time.sleep(0.001) except (OSError, serial.SerialException), err: + timer.cancel() + raise util.CommError('Error reading from AD2SERIAL device: {0}'.format(str(err))) else: if got_line:
Fixed a potential timer issue.
py
diff --git a/discord/ext/commands/core.py b/discord/ext/commands/core.py index <HASH>..<HASH> 100644 --- a/discord/ext/commands/core.py +++ b/discord/ext/commands/core.py @@ -215,6 +215,14 @@ class Command(_BaseCommand): If ``True``\, cooldown processing is done after argument parsing, which calls converters. If ``False`` then cooldown processing is done first and then the converters are called second. Defaults to ``False``. + extras: :class:`dict` + A dict of user provided extras to attach to the Command. + + .. note:: + This object may be copied by the library. + + + .. versionadded:: 2.0 """ def __new__(cls, *args, **kwargs): @@ -258,6 +266,7 @@ class Command(_BaseCommand): self.usage = kwargs.get('usage') self.rest_is_raw = kwargs.get('rest_is_raw', False) self.aliases = kwargs.get('aliases', []) + self.extras = kwargs.get('extras', {}) if not isinstance(self.aliases, (list, tuple)): raise TypeError("Aliases of a command must be a list or a tuple of strings.")
[commands] Add Command.extras
py
diff --git a/examples/basic_usage.py b/examples/basic_usage.py index <HASH>..<HASH> 100644 --- a/examples/basic_usage.py +++ b/examples/basic_usage.py @@ -58,11 +58,9 @@ def main(): reports = ts.get_latest_reports(token) for report in reports: - break; - - result = ts.get_report_details(token, report['id']) - print("Getting Report Details using '%s': \n\t%s" % (report['id'], json.dumps(result, indent=4))) - print() + result = ts.get_report_details(token, report['id']) + print("Getting Report Details using '%s': \n%s" % (report['id'], json.dumps(result, indent=4))) + print() if do_query_indicators: print("Querying correlated indicators with search string '%s' (first 100)" % search_string)
Change do_report_details in basic_usage to get details of 5 different reports
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -59,7 +59,7 @@ setup( # You can just specify the packages manually here if your project is # simple. Or you can use find_packages(). - packages=find_packages(exclude=['examples', 'testing']), + packages=find_packages(exclude=['docs', 'testing']), # List run-time dependencies here. These will be installed by pip when your # project is installed. For an analysis of "install_requires" vs pip's
setup.py update: changed ‘examples’ to ‘docs’ in the excluded packages list.
py
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/event_based.py +++ b/openquake/calculators/event_based.py @@ -332,13 +332,12 @@ def set_counts(dstore, dsetname): def set_random_years(dstore, name, investigation_time): """ - Sort the `events` array and attach year labels sensitive to the + Set on the `events` dataset year labels sensitive to the SES ordinal and the investigation time. """ events = dstore[name].value - eids = numpy.sort(events['eid']) years = numpy.random.choice(investigation_time, len(events)) + 1 - year_of = dict(zip(eids, years)) + year_of = dict(zip(numpy.sort(events['eid']), years)) # eid -> year for event in events: idx = event['ses'] - 1 # starts from 0 event['year'] = idx * investigation_time + year_of[event['eid']]
Updated docstring [skip CI] Former-commit-id: <I>fb4ccb5d0d<I>ee<I>a<I>d7f<I>d
py
diff --git a/appmetrics/histogram.py b/appmetrics/histogram.py index <HASH>..<HASH> 100644 --- a/appmetrics/histogram.py +++ b/appmetrics/histogram.py @@ -352,6 +352,7 @@ class Histogram(object): @contextmanager def time(self): + """A context manager which times execution of a piece of code""" t1 = time.time() yield t2 = time.time()
Added a docstring for `time`.
py
diff --git a/tests/test_lens.py b/tests/test_lens.py index <HASH>..<HASH> 100644 --- a/tests/test_lens.py +++ b/tests/test_lens.py @@ -1,3 +1,5 @@ +import collections + import pytest from lenses import lens, baselens @@ -101,8 +103,18 @@ def test_lens_getitem(): assert lens([1, 2, 3]).getitem_(1).get() == 2 +def test_lens_getitem_direct(): + assert lens([1, 2, 3])[1].get() == 2 + + def test_lens_getattr(): - assert lens(3).getattr_('denominator').get() == 1 + nt = collections.namedtuple('nt', 'attr') + assert lens(nt(3)).getattr_('attr').get() == 3 + + +def test_lens_getattr_direct(): + nt = collections.namedtuple('nt', 'attr') + assert lens(nt(3)).attr.get() == 3 def test_lens_both():
added test for Lens.__getattr__ and Lens.__getitem__
py
diff --git a/timepiece/utils.py b/timepiece/utils.py index <HASH>..<HASH> 100644 --- a/timepiece/utils.py +++ b/timepiece/utils.py @@ -303,7 +303,7 @@ def date_filter(func): return inner_decorator -def get_hours(entries): +def get_hours(entries): hours = {'total': 0} for entry in entries: hours['total'] += entry['hours']
[#<I>] PEP 8 Fix
py
diff --git a/adminrestrict/tests.py b/adminrestrict/tests.py index <HASH>..<HASH> 100755 --- a/adminrestrict/tests.py +++ b/adminrestrict/tests.py @@ -16,10 +16,9 @@ class ModelTests(TestCase): def setUp(self): self.user = User.objects.create_user(username="foo", password="bar") - def test_blocked_ip(self): + def test_allow_all_if_empty(self): resp = self.client.post("/admin/", data={'username':"foo", 'password':"bar"}) - self.assertEqual(resp.status_code, 302) - self.assertTrue(resp.content.decode().startswith("Access to admin is denied")) + self.assertEqual(resp.status_code, 200) def test_allowed_ip(self): a = AllowedIP.objects.create(ip_address="127.0.0.1")
Changed test to allow all requests when the AllowedIP table is empty
py
diff --git a/codecov/__init__.py b/codecov/__init__.py index <HASH>..<HASH> 100644 --- a/codecov/__init__.py +++ b/codecov/__init__.py @@ -27,10 +27,22 @@ try: except ImportError: # pragma: no cover from urllib import urlencode -try: - from shlex import quote -except ImportError: # pragma: no cover - from pipes import quote +quote = None +if sys.platform == 'win32': # pragma: no cover + try: + # https://github.com/python/cpython/blob/3.7/Lib/subprocess.py#L174-L175 + from subprocess import list2cmdline + + def quote(arg): + return list2cmdline([arg]) + except ImportError: + pass + +if quote is None: + try: + from shlex import quote + except ImportError: # pragma: no cover + from pipes import quote import subprocess
Fix command line quoting for Windows (#<I>)
py
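The commit works because Windows and POSIX quote arguments differently; a minimal sketch of the difference, assuming Python 3 (shlex.quote emits POSIX single quotes, which cmd.exe does not strip, while list2cmdline applies the MS C runtime rules):

```python
from shlex import quote
from subprocess import list2cmdline

arg = r"C:\Program Files\app"
print(quote(arg))           # 'C:\Program Files\app' -> single quotes, wrong for cmd.exe
print(list2cmdline([arg]))  # "C:\Program Files\app" -> double quotes, what Windows expects
```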
diff --git a/snapcast/control/server.py b/snapcast/control/server.py index <HASH>..<HASH> 100644 --- a/snapcast/control/server.py +++ b/snapcast/control/server.py @@ -277,7 +277,10 @@ class Snapserver(object): def _on_group_mute(self, data): """Handle group mute.""" - self._groups.get(data.get('id')).update_mute(data) + group = self._groups.get(data.get('id')) + group.update_mute(data) + for clientID in group.clients: + self._clients.get(clientID).callback() def _on_group_stream_changed(self, data): """Handle group stream change.""" @@ -333,6 +336,8 @@ class Snapserver(object): for group in self._groups.values(): if group.stream == data.get('id'): group.callback() + for clientID in group.clients: + self._clients.get(clientID).callback() def _on_stream_update(self, data): """Handle stream update.""" @@ -342,6 +347,8 @@ class Snapserver(object): for group in self._groups.values(): if group.stream == data.get('id'): group.callback() + for clientID in group.clients: + self._clients.get(clientID).callback() def set_on_update_callback(self, func): """Set on update callback function."""
fire client callbacks on stream update and group mute
py
diff --git a/pydoc-markdown/src/pydoc_markdown/contrib/renderers/hugo.py b/pydoc-markdown/src/pydoc_markdown/contrib/renderers/hugo.py index <HASH>..<HASH> 100644 --- a/pydoc-markdown/src/pydoc_markdown/contrib/renderers/hugo.py +++ b/pydoc-markdown/src/pydoc_markdown/contrib/renderers/hugo.py @@ -340,7 +340,7 @@ def install_hugo(to: str, version: str = None, extended: bool = True) -> None: if sys.platform.startswith('linux'): platform = 'Linux' - elif sys.pltform.startswith('win32'): + elif sys.platform.startswith('win32'): platform = 'Windows' elif sys.platform.startswith('darwin'): platform = 'macOS'
fix typo in pydoc_markdown/contrib/renderers/hugo.py (#<I>)
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -265,5 +265,6 @@ texinfo_documents = [ # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'https://docs.python.org/3/': None, - 'https://pyopenssl.readthedocs.org/en/latest/': None + 'https://pyopenssl.readthedocs.org/en/latest/': None, + 'http://babel.pocoo.org/docs/': None, }
Connect intersphinx to babel docs
py
diff --git a/clusters.py b/clusters.py index <HASH>..<HASH> 100644 --- a/clusters.py +++ b/clusters.py @@ -254,8 +254,12 @@ class ClusterEnsemble(object): @property def massrich_norm(self): """Normalization of Mass-Richness relation: + M200 = norm * (N200 / 20) ^ slope. + Changes to massrich_norm will propagate to all mass-dependant + variables. + :property: Returns normalization in Msun :property type: Quantity (float, with astropy.units of Msun) :setter: Sets normalization in Msun @@ -279,8 +283,12 @@ class ClusterEnsemble(object): @property def massrich_slope(self): """Slope of Mass-Richness relation: + M200 = norm * (N200 / 20) ^ slope. + Changes to massrich_slope will propagate to all mass-dependant + variables. + :property: Returns slope :property type: float :setter: Sets slope
be explicit about effects of changing mass-rich relation in docs
py
diff --git a/sos/archive.py b/sos/archive.py index <HASH>..<HASH> 100644 --- a/sos/archive.py +++ b/sos/archive.py @@ -340,14 +340,14 @@ class FileCacheArchive(Archive): # path case try: shutil.copy(src, dest) + except OSError as e: + self.log_info("File not collected: '%s'" % e) except IOError as e: # Filter out IO errors on virtual file systems. if src.startswith("/sys/") or src.startswith("/proc/"): pass else: self.log_info("caught '%s' copying '%s'" % (e, src)) - except OSError as e: - self.log_info("File not collected: '%s'" % e) # copy file attributes, skip SELinux xattrs for /sys and /proc try:
[archive] Re-order exception handling in add_file. Reorders the exception handling within add_file() to avoid an unreachable OSError, as highlighted by LGTM.
py
diff --git a/scanpy/neighbors/__init__.py b/scanpy/neighbors/__init__.py index <HASH>..<HASH> 100644 --- a/scanpy/neighbors/__init__.py +++ b/scanpy/neighbors/__init__.py @@ -318,6 +318,11 @@ def compute_connectivities_umap( knn_indices=knn_indices, knn_dists=knn_dists, set_op_mix_ratio=set_op_mix_ratio, local_connectivity=local_connectivity) + + if isinstance(connectivities, tuple): + # In umap-learn 0.4, this returns (result, sigmas, rhos) + connectivities = connectivities[0] + distances = get_sparse_matrix_from_indices_distances_umap(knn_indices, knn_dists, n_obs, n_neighbors) return distances, connectivities.tocsr()
Preparations for UMAP <I> release (#<I>)
py
diff --git a/dallinger/models.py b/dallinger/models.py index <HASH>..<HASH> 100644 --- a/dallinger/models.py +++ b/dallinger/models.py @@ -37,23 +37,23 @@ class SharedMixin(object): #: a generic column that can be used to store experiment-specific details in #: String form. - property1 = Column(String(256), nullable=True, default=None) + property1 = Column(Text, nullable=True, default=None) #: a generic column that can be used to store experiment-specific details in #: String form. - property2 = Column(String(256), nullable=True, default=None) + property2 = Column(Text, nullable=True, default=None) #: a generic column that can be used to store experiment-specific details in #: String form. - property3 = Column(String(256), nullable=True, default=None) + property3 = Column(Text, nullable=True, default=None) #: a generic column that can be used to store experiment-specific details in #: String form. - property4 = Column(String(256), nullable=True, default=None) + property4 = Column(Text, nullable=True, default=None) #: a generic column that can be used to store experiment-specific details in #: String form. - property5 = Column(String(256), nullable=True, default=None) + property5 = Column(Text, nullable=True, default=None) #: boolean indicating whether the Network has failed which #: prompts Dallinger to ignore it unless specified otherwise. Objects are
Convert properties from String to Text (#<I>). * convert property1 from String to Text; this allows strings of any length to be stored. * convert the extra properties to the Text type as well.
py
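At the SQLAlchemy level the change swaps a length-capped VARCHAR for an unbounded TEXT column; a minimal standalone sketch (hypothetical model, assumes SQLAlchemy 1.4+):

```python
from sqlalchemy import Column, Integer, String, Text
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Note(Base):
    __tablename__ = "notes"       # hypothetical table, for illustration only
    id = Column(Integer, primary_key=True)
    capped = Column(String(256))  # VARCHAR(256): long values error or truncate
    uncapped = Column(Text)       # TEXT: no declared length limit
```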
diff --git a/tests/test_DemographyDebugger.py b/tests/test_DemographyDebugger.py index <HASH>..<HASH> 100644 --- a/tests/test_DemographyDebugger.py +++ b/tests/test_DemographyDebugger.py @@ -143,10 +143,12 @@ class TestBadMassMigrations(unittest.TestCase): setup_and_run_model(self.pop, d, 20) -# FIXME: many of the tests below this class -# have nothing to do with GlobalExtinction, yet -# the class name would suggest otherwise. -class TestDetectingExtinctions(unittest.TestCase): +class TestDetectingAbsenceOfAncestry(unittest.TestCase): + """ + These tests check for errors arising due to + there being no valid parental generation at some + point. + """ @classmethod def setUp(self): self.pop = fwdpy11.DiploidPopulation(100, 1.0)
Rename and document test. Closes #<I>
py
diff --git a/kubespawner/objects.py b/kubespawner/objects.py index <HASH>..<HASH> 100644 --- a/kubespawner/objects.py +++ b/kubespawner/objects.py @@ -19,7 +19,7 @@ from kubernetes.client.models import ( V1Service, V1ServiceSpec, V1ServicePort, V1beta1Ingress, V1beta1IngressSpec, V1beta1IngressRule, V1beta1HTTPIngressRuleValue, V1beta1HTTPIngressPath, - V1beta1IngressBackend + V1beta1IngressBackend, ) def make_pod( @@ -204,9 +204,7 @@ def make_pod( pod.spec.service_account_name = service_account if run_privileged: - notebook_container.security_context = V1SecurityContext( - privileged=True - ) + notebook_container.security_context = V1SecurityContext(privileged=True) notebook_container.resources.requests = {} if cpu_guarantee:
Aesthetics: use trailing commas. I have come to realize the value of using trailing commas when it comes to version control. We can end up with less clutter and potentially even avoid merge conflicts if we use trailing commas.
py
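The reasoning here is about diff locality; a minimal sketch (hypothetical list) of why the trailing comma matters:

```python
# Hypothetical before/after to show the diff-noise argument.
# Without a trailing comma, adding an item rewrites the previous last line:
#   -    "V1Service"
#   +    "V1Service",
#   +    "V1ServicePort"
# With a trailing comma, the same change is a single added line:
#   +    "V1ServicePort",
IMPORTS = [
    "V1Pod",
    "V1Service",  # the trailing comma keeps the next addition to one line
]
```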
diff --git a/lewis/adapters/stream.py b/lewis/adapters/stream.py index <HASH>..<HASH> 100644 --- a/lewis/adapters/stream.py +++ b/lewis/adapters/stream.py @@ -501,7 +501,7 @@ class StreamAdapter(Adapter): if bound is None: raise RuntimeError( 'Unable to produce callable object for non-existing member \'{}\' ' - 'of device or interface.'.format(cmd.member)) + 'of device or interface.'.format(cmd.func)) for bound_cmd in bound: if bound_cmd.pattern in patterns:
Fix wrong variable name in _bind_commands. This was a leftover from the old `Cmd` that was overlooked when this was refactored.
py
diff --git a/tests/unit/remote/test_s3.py b/tests/unit/remote/test_s3.py index <HASH>..<HASH> 100644 --- a/tests/unit/remote/test_s3.py +++ b/tests/unit/remote/test_s3.py @@ -83,3 +83,20 @@ def test_walk_files(remote): ] assert list(remote.walk_files(remote.path_info / "data")) == files + + +def test_copy_preserve_etag_across_buckets(remote): + s3 = remote.s3 + s3.create_bucket(Bucket="another") + + another = RemoteS3(None, {"url": "s3://another", "region": "us-east-1"}) + + from_info = remote.path_info / "foo" + to_info = another.path_info / "foo" + + remote.copy(from_info, to_info) + + from_etag = RemoteS3.get_etag(s3, "bucket", "foo") + to_etag = RemoteS3.get_etag(s3, "another", "foo") + + assert from_etag == to_etag
tests: same ETag across buckets
py
diff --git a/test/test_report_structured.py b/test/test_report_structured.py index <HASH>..<HASH> 100644 --- a/test/test_report_structured.py +++ b/test/test_report_structured.py @@ -219,8 +219,7 @@ def test_csv_file_output(capsys, tmpdir): test_file = str(target_dir) + 'test2' cfg['reports'] = [{'type': 'csv', 'file': test_file}] - with open(test_file, 'w') as fp: - execute_reports(cfg, 'someproject', collector) + execute_reports(cfg, 'someproject', collector) out, err = capsys.readouterr() assert out == ''
Unnecessary opening of file
py
diff --git a/pytest-pyramid-server/pytest_pyramid_server.py b/pytest-pyramid-server/pytest_pyramid_server.py index <HASH>..<HASH> 100644 --- a/pytest-pyramid-server/pytest_pyramid_server.py +++ b/pytest-pyramid-server/pytest_pyramid_server.py @@ -20,6 +20,7 @@ from wsgiref.simple_server import make_server from paste.deploy.loadwsgi import loadapp from pytest import yield_fixture +from pytest_server_fixtures import CONFIG from pytest_server_fixtures.http import HTTPTestServer @@ -83,8 +84,7 @@ class PyramidTestServer(HTTPTestServer): # Always print debug output for this process os.environ['DEBUG'] = '1' - # Discover externally accessable hostname so selenium can get to it - kwargs['hostname'] = kwargs.get('hostname', socket.gethostbyname(os.uname()[1])) + kwargs['hostname'] = kwargs.get('hostname', CONFIG.fixture_hostname) super(PyramidTestServer, self).__init__(preserve_sys_path=True, **kwargs)
Make pyramid server respect the fixture config hostname; selenium-specific setup should be done higher in the stack
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,7 @@ setup( author='Frederik Beaujean, Stephan Jahn', author_email='[email protected], [email protected]', license='GPLv2', - install_requires=['numpy', 'scipy', 'cython'], + install_requires=['numpy', 'scipy', 'cython', 'setuptools>=3.3'], extras_require={'testing': ['nose'], 'plotting': ['matplotlib'], 'parallelization': ['mpi4py']}, ext_modules=cythonize(extensions, compiler_directives=dict(profile=False, boundscheck=False,
[setup.py] force setuptools version >= <I> during setup
py
diff --git a/pandas_td/td.py b/pandas_td/td.py index <HASH>..<HASH> 100644 --- a/pandas_td/td.py +++ b/pandas_td/td.py @@ -132,10 +132,10 @@ class QueryEngine(object): html += '</pre>\n' # progress progress = None - for progress in re.findall(r'\n(\d{4}-\d{2}-\d{2}.*(?:\n .*)+)', output): + for progress in re.findall(r'\n(\d{4}-\d{2}-\d{2}.*\n\d{8}.*(?:\n *\[\d+\].*)+)', output): pass if progress: - html += '<pre>{0}</pre>'.format(progress) + html += '<code><small><small>{0}</small></small></code>'.format(progress) # finished at for rows, finished in re.findall(r'\n(\d+ rows.*)\n(finished at.*)', output): html += '{0}<br>'.format(rows)
fix parsing Presto's query progress
py
diff --git a/tests/test_settings.py b/tests/test_settings.py index <HASH>..<HASH> 100644 --- a/tests/test_settings.py +++ b/tests/test_settings.py @@ -4,12 +4,16 @@ import unittest import tempfile +import sys from os.path import basename, dirname, splitext -from PyQt5.QtCore import QSettings +from PyQt5.QtCore import QCoreApplication, QSettings from ReText import readListFromSettings, writeListToSettings, \ readFromSettings, writeToSettings +# Keep a reference so it is not garbage collected +app = QCoreApplication(sys.argv) + class TestSettings(unittest.TestCase): def setUp(self): self.tempFile = tempfile.NamedTemporaryFile(prefix='retext-', suffix='.ini')
test_settings: Keep QCoreApplication in memory so that the QSettings destructor does not print a warning.
py
diff --git a/andes/core/model.py b/andes/core/model.py index <HASH>..<HASH> 100644 --- a/andes/core/model.py +++ b/andes/core/model.py @@ -2131,6 +2131,8 @@ from numpy import greater_equal, less_equal, greater, less # NOQA src = inspect.getsource(func) src = src.replace("def _lambdifygenerated(", f"def {func_name}(") + # remove `Indicator` + src = src.replace("Indicator", "") if self.parent.system.config.yapf_pycode: try:
Manually remove `Indicator` in the generated function.
py
diff --git a/woven/deployment.py b/woven/deployment.py index <HASH>..<HASH> 100644 --- a/woven/deployment.py +++ b/woven/deployment.py @@ -121,10 +121,8 @@ def deploy_files(local_dir, remote_dir, pattern = '',rsync_exclude=['*.pyc','.*' run(' '.join(['mkdir -pv',remote_staging_dir])).split('\n') created_list = [remote_staging_dir] - extra_opts = None - #upload into remote staging - rsync_project(local_dir=staging_dir,remote_dir=remote_staging_dir,extra_opts=extra_opts,exclude=rsync_exclude,delete=True) + rsync_project(local_dir=staging_dir,remote_dir=remote_staging_dir,exclude=rsync_exclude,delete=True) #create the final destination created_dir_list = mkdirs(remote_dir, use_sudo)
fixed issue with extra_opts for rsync custom port
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- +# # Copyright (C) 2014 by Alex Brandt <[email protected]> # # crumbs is freely distributable under the terms of an MIT-style license.
add coding to setup.py for unicode characters in python<I>
py
diff --git a/satpy/readers/agri_l1.py b/satpy/readers/agri_l1.py index <HASH>..<HASH> 100755 --- a/satpy/readers/agri_l1.py +++ b/satpy/readers/agri_l1.py @@ -54,9 +54,11 @@ class HDF_AGRI_L1(HDF5FileHandler): lut_key = ds_info.get('lut_key', dataset_id.name) data = self.get(file_key) lut = self.get(lut_key) + if data.ndim >= 2: + data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'}) # convert bytes to string - data.attrs['long_name'] = data.attrs['long_name'].decode("utf-8") + data.attrs['long_name'] = data.attrs['long_name'].decode("utf-8", 'ignore') data.attrs['band_names'] = data.attrs['band_names'].decode("utf-8") data.attrs['center_wavelength'] = data.attrs['center_wavelength'].decode("utf-8") @@ -64,6 +66,7 @@ class HDF_AGRI_L1(HDF5FileHandler): calibration = ds_info['calibration'] if calibration == 'counts': + data.attrs['units'] = ds_info['units'] ds_info['valid_range'] = data.attrs['valid_range'] return data
bugfix: ignore the Chinese characters in long_name; rename dimension names for satpy features
py
diff --git a/tests/test_behaviour.py b/tests/test_behaviour.py index <HASH>..<HASH> 100644 --- a/tests/test_behaviour.py +++ b/tests/test_behaviour.py @@ -18,7 +18,7 @@ STATE_TWO = "STATE_TWO" STATE_THREE = "STATE_THREE" -def wait_for_event(event, tries=100, sleep=0.1): +def wait_for_event(event, tries=200, sleep=0.1): counter = 0 while not event.is_set() and counter < tries: event.wait(sleep)
Increase wait tries for behaviour tests.
py
diff --git a/tests.py b/tests.py index <HASH>..<HASH> 100644 --- a/tests.py +++ b/tests.py @@ -1202,7 +1202,7 @@ def test_python_prerelease_release_postrelease(tmpdir, capsys): parse = ^ (?P<major>\d+)\.(?P<minor>\d+) # minimum 'N.N' (?: - (?P<prerel>[abc]|rc) # 'a' = alpha, 'b' = beta + (?P<prerel>[abc]|rc|dev) # 'a' = alpha, 'b' = beta # 'c' or 'rc' = release candidate (?: (?P<prerelversion>\d+(?:\.\d+)*) @@ -1245,9 +1245,12 @@ def test_python_prerelease_release_postrelease(tmpdir, capsys): main(['prerel']) assert '1.0' == file_content() - main(['minor', '--verbose']) + main(['minor']) assert '1.1dev' == file_content() + main(['prerel', '--verbose']) + assert '1.1a' == file_content() + def test_part_first_value(tmpdir): tmpdir.join("the_version.txt").write("0.9.4")
pep-<I> style test: also parse 'dev' prerel
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -62,13 +62,12 @@ install_requires = setup_requires + ['Mako>=1.0.1', 'pillow', 'h5py>=2.5', 'jinja2', - 'pycbc-mpld3>=0.3.dev0', + 'mpld3>=0.3', 'pyRXP>=2.1.0', 'pycbc-glue>=1.0.1', 'kombine', 'emcee>=2.2.0', 'corner>=2.0.1', - #'scikit-learn>=0.17.0', # travis does not like scikit-learn ] #FIXME Remove me when we bump to h5py > 2.5
depend on released mpld3 (#<I>)
py
diff --git a/src/comet.py b/src/comet.py index <HASH>..<HASH> 100644 --- a/src/comet.py +++ b/src/comet.py @@ -182,7 +182,8 @@ class CometJoyceClientRelay(JoyceRelay): def run(self): assert not self.running self.running = True - self._do_request() + if self.token is None: + self._do_request() self.hub.threadPool.execute_named(self.run_dispatcher, '%s.run_dispatcher' % self.l.name) self.hub.threadPool.execute_named(self.run_requester,
comet: client: only do initial request on None token
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -31,7 +31,7 @@ test_requirements = [ setup( name='graphql-ws', - version='0.1.1', + version='0.2.0', description="Websocket server for GraphQL subscriptions", long_description=readme + '\n\n' + history, author="Syrus Akbary",
Updated version to <I>
py
diff --git a/green/process.py b/green/process.py index <HASH>..<HASH> 100644 --- a/green/process.py +++ b/green/process.py @@ -297,6 +297,11 @@ def poolRunner(target, queue, coverage_number=None, omit_patterns=[]): # pragma: # Loading was successful, lets do this try: test.run(result) + # If your class setUpClass(self) method crashes, the test doesn't + # raise an exception, but it does an an entry to errors. + if result and getattr(result, 'errors', False): + queue.put(test) + queue.put(result) except: # Some frameworks like testtools record the error AND THEN let it # through to crash things. So we only need to manufacture another error
Crashes in setUpClass() are (stupidly) reported in a completely different way than any other failure or crash in unittest. Now we handle it that way. Fixes #<I>.
py
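A self-contained sketch of the unittest behavior this commit works around: a crashing setUpClass never raises out of run(); it only lands in result.errors.

```python
import unittest

class Crashy(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        raise RuntimeError("boom")

    def test_anything(self):
        pass

result = unittest.TestResult()
suite = unittest.defaultTestLoader.loadTestsFromTestCase(Crashy)
suite.run(result)     # does not raise, despite the crash in setUpClass
print(result.errors)  # the failure is recorded here instead
```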
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,6 @@ install_requires = [ "celery>=4.1.0", "colorlog", "coverage", - "docker-compose", "flake8>=3.4.1", "future", "kombu>=4.1.0", @@ -51,7 +50,7 @@ sys.path.insert(0, os.path.join(os.path.dirname(__file__), "celery_loaders")) setup( name="celery-loaders", cmdclass={"build_py": build_py}, - version="1.0.4", + version="1.0.5", description="Celery Application and Task Loader Examples", long_description="Examples for loading celery applications with " + "easy-to-discover task modules",
removing docker compose req
py
diff --git a/matrix_client/client.py b/matrix_client/client.py index <HASH>..<HASH> 100644 --- a/matrix_client/client.py +++ b/matrix_client/client.py @@ -393,7 +393,7 @@ class Room(object): self.events.pop(0) for listener in self.listeners: - listener(event) + listener(self, event) def get_events(self): """ Get the most recent events for this room.
client: Pass room as argument to event callbacks. When we get an event, the room part is stripped away before the event callback on the room gets the event, so inside the callback there is no easy way to find out in which room the event happened.
py
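A hypothetical handler showing the new two-argument callback shape; it assumes `room` is a connected Room instance, and `listeners` is the list the diff iterates over:

```python
# One handler can now serve many rooms, since the room rides along.
def on_room_event(room, event):
    print("got %s in room %s" % (event.get("type"), room.room_id))

room.listeners.append(on_room_event)  # assumes a connected Room instance
```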
diff --git a/pandas/core/series.py b/pandas/core/series.py index <HASH>..<HASH> 100644 --- a/pandas/core/series.py +++ b/pandas/core/series.py @@ -1603,7 +1603,7 @@ copy : boolean, default False path : string or None Output filepath. If None, write to stdout """ - f = open(path, 'wb') + f = open(path, 'w') csvout = csv.writer(f) csvout.writerows(self.iteritems()) f.close()
Fix bug writing Series to CSV in Python 3.
py
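The underlying Python 3 behavior, as a self-contained sketch: csv.writer produces str, so a binary-mode handle (the old 'wb') rejects the first write.

```python
import csv
import io

try:
    csv.writer(io.BytesIO()).writerow(["a", 1])  # what open(path, 'wb') amounts to
except TypeError as err:
    print("binary mode fails:", err)             # bytes-like object required, not 'str'

buf = io.StringIO()                              # what open(path, 'w') amounts to
csv.writer(buf).writerow(["a", 1])
print(buf.getvalue())                            # a,1
```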
diff --git a/dvc/__init__.py b/dvc/__init__.py index <HASH>..<HASH> 100644 --- a/dvc/__init__.py +++ b/dvc/__init__.py @@ -7,7 +7,7 @@ import os import warnings -VERSION_BASE = '0.18.8' +VERSION_BASE = '0.18.9' __version__ = VERSION_BASE PACKAGEPATH = os.path.abspath(os.path.dirname(__file__))
dvc: bump to <I>
py
diff --git a/intranet/apps/eighth/models.py b/intranet/apps/eighth/models.py index <HASH>..<HASH> 100644 --- a/intranet/apps/eighth/models.py +++ b/intranet/apps/eighth/models.py @@ -214,17 +214,20 @@ class EighthActivity(AbstractBaseEighthModel): def save(self, *args, **kwargs): update_aid = False - if not self.pk: - # Not in database yet - if not self.aid: + + if not self.aid: + if self.pk: + self.aid = self.pk + else: update_aid = True - # Create and get an ID - super(EighthActivity, self).save(*args, **kwargs) - self.aid = self.pk - # Save again super(EighthActivity, self).save(*args, **kwargs) + if update_aid: + # Update aid with new ID and re-save + self.aid = self.pk + super(EighthActivity, self).save(*args, **kwargs) + class Meta: verbose_name_plural = "eighth activities"
correctly save AID and update with reg ID when appropriate
py
diff --git a/tweepy/models.py b/tweepy/models.py index <HASH>..<HASH> 100644 --- a/tweepy/models.py +++ b/tweepy/models.py @@ -231,6 +231,7 @@ class DirectMessage(Model): @classmethod def parse(cls, api, json): dm = cls(api) + setattr(dm, '_json', json) if "event" in json: json = json["event"] for k, v in json.items():
ENH: Save the raw JSON on DirectMessages as well. This brings the behavior of the `DirectMessage` model in line with other `Model`s.
py
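A sketch of the effect, using a hypothetical payload and assuming parse tolerates api=None (nothing in the shown path touches the API object):

```python
from tweepy.models import DirectMessage

payload = {"event": {"id": "110", "type": "message_create"}}  # hypothetical payload
dm = DirectMessage.parse(None, payload)
print(dm._json is payload)  # True: the untouched API dict now rides on the model
```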
diff --git a/src/accounts/views.py b/src/accounts/views.py index <HASH>..<HASH> 100644 --- a/src/accounts/views.py +++ b/src/accounts/views.py @@ -71,6 +71,7 @@ class UserProfileDetailView(UserProfileBaseMixin, DetailView): 'ticket': TicketCollabCount, } + messages = Message.objects.filter(from_address__user__pk=user.pk) for type in ['thread', 'ticket', 'wiki', 'changeset', 'attachment']: CounterClass = counter_class.get(type) if CounterClass: @@ -80,6 +81,8 @@ class UserProfileDetailView(UserProfileBaseMixin, DetailView): count_types[trans(type)] = 0 else: count_types[trans(type)] = counter.count + elif type == 'thread': + count_types[trans(type)] = messages.count() else: sqs = SearchQuerySet() for filter_or in fields_or_lookup: @@ -100,7 +103,6 @@ class UserProfileDetailView(UserProfileBaseMixin, DetailView): context['emails'] = query[:10] count_by = 'thread__mailinglist__name' - messages = Message.objects.filter(from_address__user__pk=user.pk) context['list_activity'] = dict(messages.values_list(count_by)\ .annotate(Count(count_by))\ .order_by(count_by))
Fixing the email count on the area contribution chart of the user profile
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -18,7 +18,7 @@ setup( (http://pythonpaste.org/webtest/) with django's testing framework.""", long_description = open('README.rst').read(), - install_requires = ['webtest'], + requires = ['webtest'], classifiers=[ 'Development Status :: 4 - Beta',
django-webtest doesn't use setuptools and distutils doesn't support install_requires
py
diff --git a/src/python/setup.py b/src/python/setup.py index <HASH>..<HASH> 100644 --- a/src/python/setup.py +++ b/src/python/setup.py @@ -27,7 +27,7 @@ def get_long_description(): setup( name='im-pipelines-utils', - version=os.environ.get('TRAVIS_TAG', '1.0.0'), + version=os.environ.get('GITHUB_REF_SLUG', '1.0.0'), author='Alan Christie', author_email='[email protected]', url='https://github.com/InformaticsMatters/pipelines-utils',
- Setup now expects SLUG
py
diff --git a/compliance/run-autobahn-tests.py b/compliance/run-autobahn-tests.py index <HASH>..<HASH> 100644 --- a/compliance/run-autobahn-tests.py +++ b/compliance/run-autobahn-tests.py @@ -4,6 +4,7 @@ from __future__ import print_function import sys +import os import os.path import argparse import errno @@ -202,6 +203,12 @@ def main(): else: cases = json.loads(cases) + # The autobahn test suite currently requires hash randomization to be + # disabled: + # https://github.com/python-hyper/wsproto/issues/55 + # https://github.com/crossbario/autobahn-testsuite/issues/80 + os.environ["PYTHONHASHSEED"] = "0" + setup_venv() if args.MODE == "client":
Disable hash randomization when running autobahn tests. Fixes: gh-<I>
py
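A minimal sketch of why the commit sets the variable before spawning processes: PYTHONHASHSEED in os.environ only takes effect in child interpreters, not the current one.

```python
import os
import subprocess
import sys

os.environ["PYTHONHASHSEED"] = "0"  # affects child interpreters, not this one
cmd = [sys.executable, "-c", "print(hash('wsproto'))"]
print(subprocess.check_output(cmd))  # identical output on every run,
print(subprocess.check_output(cmd))  # because each child sees the fixed seed
```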
diff --git a/sentry_sdk/utils.py b/sentry_sdk/utils.py index <HASH>..<HASH> 100644 --- a/sentry_sdk/utils.py +++ b/sentry_sdk/utils.py @@ -64,7 +64,7 @@ def get_default_release(): try: release = ( subprocess.Popen( - ["git", "rev-parse", "--short", "HEAD"], + ["git", "rev-parse", "HEAD"], stdout=subprocess.PIPE, stderr=null, stdin=null,
Use full git SHA as release name (#<I>). This fixes #<I>.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -88,11 +88,10 @@ def get_release_date (): MSVCP90Version = '9.0.21022.8' MSVCP90Token = '1fc8b3b9a1e18e3b' -data_files = [] if os.name == 'nt': - data_files.append(('share', ['doc/README.txt'])) + data_files = [] else: - data_files.append(('share/man/man1', ['doc/dosage.1'])) + data_files = [('share/man/man1', ['doc/dosage.1'])] def get_nt_platform_vars ():
Don't install README.txt on Windows.
py
diff --git a/pyblish_qml/app.py b/pyblish_qml/app.py index <HASH>..<HASH> 100644 --- a/pyblish_qml/app.py +++ b/pyblish_qml/app.py @@ -85,6 +85,7 @@ class Application(QtGui.QGuiApplication): host = ipc.client.Proxy() controller = control.Controller(host) + controller.finished.connect(lambda: window.alert(0)) context = engine.rootContext() context.setContextProperty("app", controller)
Alert the window on controller.finished signal #<I>
py
diff --git a/tensorlayer/layers.py b/tensorlayer/layers.py index <HASH>..<HASH> 100755 --- a/tensorlayer/layers.py +++ b/tensorlayer/layers.py @@ -1719,9 +1719,14 @@ class BatchNormLayer(Layer): beta = _get_variable('beta', params_shape, initializer=beta_init) - gamma = _get_variable('gamma', - params_shape, - initializer=gamma_init) + try: # TF12 + gamma = _get_variable('gamma', + params_shape, + initializer=gamma_init()) + except: # TF11 + gamma = _get_variable('gamma', + params_shape, + initializer=gamma_init) # trainable=False means : it prevent TF from updating this variable # from the gradient, we have to update this from the mean computed
Fix BatchNormLayer compatibility with TF<I>. Add compatibility with TF<I>, needed because of the change in the ones_initializer API.
py
diff --git a/openquake/engine/db/models.py b/openquake/engine/db/models.py index <HASH>..<HASH> 100644 --- a/openquake/engine/db/models.py +++ b/openquake/engine/db/models.py @@ -1996,8 +1996,7 @@ class Gmf(djm.Model): """ hc = self.output.oq_job.hazard_calculation correl_model = hc.get_correl_model() - gsims = [logictree.GSIM[art.gsim]() for art in - AssocLtRlzTrtModel.objects.filter(rlz=self.lt_realization)] + gsims = self.lt_realization.get_gsim_instances() assert gsims, 'No GSIMs found for realization %d!' % \ self.lt_realization.id # look into hzdr.assoc_lt_rlz_trt_model imts = map(from_string, hc.intensity_measure_types) @@ -2410,6 +2409,14 @@ class LtRealization(djm.Model): db_table = 'hzrdr\".\"lt_realization' ordering = ['ordinal'] + def get_gsim_instances(self): + """ + Return the GSIM instances associated to the current realization + by looking at the association table. + """ + return [logictree.GSIM[art.gsim]() for art in + AssocLtRlzTrtModel.objects.filter(rlz=self)] + ## Tables in the 'riskr' schema.
Added a method LtRealization.get_gsim_instances
py
diff --git a/endpoints/interface/uwsgi/client.py b/endpoints/interface/uwsgi/client.py index <HASH>..<HASH> 100644 --- a/endpoints/interface/uwsgi/client.py +++ b/endpoints/interface/uwsgi/client.py @@ -205,7 +205,7 @@ class WebsocketClient(HTTPClient): kwargs['timeout'] = timeout try: try: - if not self.connected: self.connect() + if not self.connected: self.connect(path) with self.wstimeout(**kwargs) as timeout: kwargs['timeout'] = timeout
Client.connect() didn't take path when auto-connecting
py
diff --git a/src/scout_apm/core/socket.py b/src/scout_apm/core/socket.py index <HASH>..<HASH> 100644 --- a/src/scout_apm/core/socket.py +++ b/src/scout_apm/core/socket.py @@ -77,8 +77,8 @@ class CoreAgentSocket(threading.Thread): if self._started_event.is_set(): self._stop_event.set() self.command_queue.put(None, False) # unblock self.command_queue.get - self._stopped_event.wait(2 * SECOND) - if self._stopped_event.is_set(): + stopped = self._stopped_event.wait(2 * SECOND) + if stopped: return True else: logger.debug("CoreAgentSocket Failed to stop thread within timeout!")
Fix race condition in CoreAgentSocket.stop() (#<I>). I realized that this method has a read-after-write concurrency bug here: if the thread stops successfully but another thread restarts it between the call to `wait()` and `is_set()`, it could incorrectly report failure to stop. `wait()` already returns the value of the event, so there's no need to reread it here.
py
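The bug class generalizes to any threading.Event; a minimal sketch of the racy read versus using the value wait() itself observed:

```python
import threading

stopped = threading.Event()

# Racy pattern: the flag can change between wait() returning and is_set().
stopped.wait(0.1)
if stopped.is_set():
    print("stopped (but this second read races with other threads)")

# Fixed pattern: wait() already returns whether the flag was set.
if stopped.wait(0.1):
    print("stopped")
else:
    print("timed out waiting for stop")
```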
diff --git a/ht/conv_free_immersed.py b/ht/conv_free_immersed.py index <HASH>..<HASH> 100644 --- a/ht/conv_free_immersed.py +++ b/ht/conv_free_immersed.py @@ -28,7 +28,8 @@ __all__ = ['Nu_vertical_plate_Churchill', 'Nu_horizontal_cylinder_Churchill', 'Nu_vertical_cylinder_Kreith_Eckert', 'Nu_vertical_cylinder_Hanesian_Kalish_Morgan', 'Nu_vertical_cylinder_Al_Arabi_Khamis', - 'Nu_vertical_cylinder_Popiel_Churchill'] + 'Nu_vertical_cylinder_Popiel_Churchill', + 'Nu_vertical_cylinder'] def Nu_vertical_plate_Churchill(Pr, Gr):
Added said master function to __all__
py
diff --git a/bitshares/amount.py b/bitshares/amount.py index <HASH>..<HASH> 100644 --- a/bitshares/amount.py +++ b/bitshares/amount.py @@ -79,11 +79,16 @@ class Amount(dict): self["asset"] = Asset(args[1], bitshares_instance=self.bitshares) self["symbol"] = self["asset"]["symbol"] - elif amount and asset: + elif amount and asset and isinstance(asset, Asset): self["amount"] = amount self["asset"] = asset self["symbol"] = self["asset"]["symbol"] + elif amount and asset and isinstance(asset, str): + self["amount"] = amount + self["asset"] = Asset(asset) + self["symbol"] = asset + else: raise ValueError
[amount] make sure that quote/base can be dealt with as strings
py
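A usage sketch of the two now-equivalent constructor forms; it assumes a configured BitShares connection, since Asset lookups hit the chain:

```python
from bitshares.asset import Asset
from bitshares.amount import Amount

a1 = Amount(10, Asset("USD"))      # previously the only accepted form
a2 = Amount(10, "USD")             # now resolved to Asset("USD") internally
print(a1["symbol"], a2["symbol"])  # USD USD
```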
diff --git a/visidata/vdtui.py b/visidata/vdtui.py index <HASH>..<HASH> 100755 --- a/visidata/vdtui.py +++ b/visidata/vdtui.py @@ -1901,7 +1901,7 @@ class Column: cls = self.__class__ ret = cls.__new__(cls) ret.__dict__.update(self.__dict__) - self.key = False # column copies lose their key status + self.keycol = False # column copies lose their key status if self._cachedValues is not None: ret._cachedValues = collections.OrderedDict() # an unrelated cache for copied columns return ret
[vdtui] typo bugfix for <I>a<I>, key is really keycol
py
diff --git a/luigi/lock.py b/luigi/lock.py index <HASH>..<HASH> 100644 --- a/luigi/lock.py +++ b/luigi/lock.py @@ -50,6 +50,8 @@ def getpcmd(pid): spid, scmd = line.strip().split(' ', 1) if int(spid) == int(pid): return scmd + # Fallback instead of None, for e.g. Cygwin where -o is an "unknown option" for the ps command: + return '[PROCESS_WITH_PID={}]'.format(pid) def get_info(pid_dir, my_pid=None):
Fallback process command name in lock.getpcmd
py
diff --git a/py/testdir_single_jvm/test_model_management.py b/py/testdir_single_jvm/test_model_management.py index <HASH>..<HASH> 100644 --- a/py/testdir_single_jvm/test_model_management.py +++ b/py/testdir_single_jvm/test_model_management.py @@ -416,6 +416,7 @@ class ApiTestCase(ModelManagementTestCase): # find all compatible frames models = node.models(key=model_key, find_compatible_frames=1) compatible_frames = models['models'][model_key]['compatible_frames'] + self.assertKeysExist(models, 'models/' + model_key, ['training_duration_in_ms']) self.assertNotEqual(models['models'][model_key]['training_duration_in_ms'], 0, "Expected non-zero training time for model: " + model_key) for frame_key in compatible_frames:
Due to a change in the core, the training_duration_in_ms disappeared from GLM. Check for the key before checking that its value is > 0.
py
diff --git a/tensor2tensor/models/research/transformer_vae.py b/tensor2tensor/models/research/transformer_vae.py index <HASH>..<HASH> 100644 --- a/tensor2tensor/models/research/transformer_vae.py +++ b/tensor2tensor/models/research/transformer_vae.py @@ -810,7 +810,7 @@ class TransformerAE(t2t_model.T2TModel): self._hparams.num_blocks, self._hparams.hidden_size, self._hparams.block_dim ], - initializer=tf.random_normal_initializer(), + initializer=tf.contrib.layers.xavier_initializer(), trainable=self._hparams.trainable_projections) self._hparams.reshape_fn = project_hidden elif self._hparams.reshape_method == "slice":
Change initializer on the projection tensors to get it working. PiperOrigin-RevId: <I>
py
diff --git a/tests/unit/modules/pip_test.py b/tests/unit/modules/pip_test.py index <HASH>..<HASH> 100644 --- a/tests/unit/modules/pip_test.py +++ b/tests/unit/modules/pip_test.py @@ -556,6 +556,15 @@ class PipTestCase(TestCase): cwd=None ) + def test_no_deps_argument_in_resulting_command(self): + mock = MagicMock(return_value={'retcode': 0, 'stdout': ''}) + with patch.dict(pip.__salt__, {'cmd.run_all': mock}): + pip.install('pep8', no_deps=True) + mock.assert_called_once_with( + 'pip install --no-deps pep8', + runas=None, + cwd=None + ) if __name__ == '__main__':
Added a mocked test case for `--no-deps` passing to `pip install`.
py
diff --git a/describe/mock/utils.py b/describe/mock/utils.py index <HASH>..<HASH> 100644 --- a/describe/mock/utils.py +++ b/describe/mock/utils.py @@ -1,7 +1,9 @@ +from ..frozen_dict import FrozenDict +from args_matcher import ArgList class Function(tuple): def __init__(self, (name, args, kwargs, is_property)): - super(Function, self).__init__((name, args, kwargs, is_property)) + super(Function, self).__init__((name, tuple(args), FrozenDict(kwargs), is_property)) @property def name(self): return self[0] @@ -12,6 +14,13 @@ class Function(tuple): @property def is_property(self): return self[3] + def with_args(self, args, kwargs): + return self.__class__(self.name, args, kwargs, self.is_property) + + @property + def arglist(self): + return ArgList(self.args, self.kwargs) + def __str__(self): return "%(prop)s%(name)s(%(args)s%(comma)s%(kwargs)s)" % { 'prop': 'property:' if self.is_property else '',
Function object will automatically make the args and kwargs immutable. Function object also now provides ways to create new Function objects based on another one, and to return an ArgList object.
py
diff --git a/src/sos/sos_executor.py b/src/sos/sos_executor.py index <HASH>..<HASH> 100755 --- a/src/sos/sos_executor.py +++ b/src/sos/sos_executor.py @@ -464,7 +464,7 @@ class Base_Executor: res = analyze_section(section) # # build DAG with input and output files of step - env.logger.debug('Adding step {} with output {}'.format(res['step_name'], short_repr(res['step_output']))) + env.logger.debug('Adding step {} with output {} to resolve target {}'.format(res['step_name'], short_repr(res['step_output']), target)) if isinstance(mo[0][1], dict): context = mo[0][1] else: @@ -519,7 +519,7 @@ class Base_Executor: res = analyze_section(section) # # build DAG with input and output files of step - env.logger.debug('Adding step {} with output {}'.format(res['step_name'], short_repr(res['step_output']))) + env.logger.debug('Adding step {} with output {} to resolve target {}'.format(res['step_name'], short_repr(res['step_output']), target)) if isinstance(mo[0][1], dict): context = mo[0][1] else:
Add a bit more debug messaging to help identify the source of an auxiliary step
py
diff --git a/tests/system/test_topen.py b/tests/system/test_topen.py index <HASH>..<HASH> 100644 --- a/tests/system/test_topen.py +++ b/tests/system/test_topen.py @@ -51,7 +51,7 @@ class topenTest(unittest.TestCase): # Make assertions self.assertEqual(actual, expected) - def test_file_excel(self): + def test_file_xls(self): # Get results actual = topen(self.make_file_path('table.xls')).read() @@ -60,6 +60,15 @@ class topenTest(unittest.TestCase): # Make assertions self.assertEqual(actual, expected) + def test_file_xlsx(self): + + # Get results + actual = topen(self.make_file_path('table.xlsx')).read() + expected = [('id', 'name'), (1.0, 'english'), (2.0, '中国人')] + + # Make assertions + self.assertEqual(actual, expected) + def test_text_csv(self): # Get results
[#<I>] added system xlsx test
py
diff --git a/hypermap/aggregator/models.py b/hypermap/aggregator/models.py index <HASH>..<HASH> 100644 --- a/hypermap/aggregator/models.py +++ b/hypermap/aggregator/models.py @@ -831,9 +831,15 @@ def update_layers_wms(service): wms = WebMapService(service.url) layer_names = list(wms.contents) parent = wms.contents[layer_names[0]].parent - + # fallback, some endpoint like this one: + # https://nsidc.org/cgi-bin/atlas_north?service=WMS&request=GetCapabilities&version=1.1.1 + # does not have a parent to check for srs + if parent: + crsOptions = parent.crsOptions + else: + crsOptions = wms.contents[layer_names[0]].crsOptions # set srs - for crs_code in parent.crsOptions: + for crs_code in crsOptions: srs, created = SpatialReferenceSystem.objects.get_or_create(code=crs_code) service.srs.add(srs)
For WMS, in some cases we need to fall back from the parent to a layer to detect crsOptions. Maybe we will need the same for other service types?
py
diff --git a/openquake/calculators/ebr.py b/openquake/calculators/ebr.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/ebr.py +++ b/openquake/calculators/ebr.py @@ -256,6 +256,7 @@ class EventBasedRiskCalculator(base.RiskCalculator): nses = self.oqparam.ses_per_logic_tree_path saved = {out: 0 for out in self.outs} N = len(self.assetcol) + zero2 = numpy.zeros(2) with self.monitor('saving loss table', autoflush=True, measuremem=True): for (o, l, r), data in numpy.ndenumerate(result): @@ -270,7 +271,7 @@ class EventBasedRiskCalculator(base.RiskCalculator): avgloss_by_aid = sum(data, AccumDict()) lst = [] for i, asset in enumerate(self.assetcol): - avg = avgloss_by_aid[i] * asset[lt] + avg = avgloss_by_aid.get(i, zero2) * asset[lt] lst.append((avg[0], avg[1])) avglosses = numpy.array(lst, avg_dt) self.datasets[o, l, r].dset[:] = avglosses
Managed the case of no avglosses
py
diff --git a/lib/emir/core.py b/lib/emir/core.py index <HASH>..<HASH> 100644 --- a/lib/emir/core.py +++ b/lib/emir/core.py @@ -20,6 +20,7 @@ import yaml from numina.core import BaseInstrument, BasePipeline, InstrumentConfiguration from numina.core import import_object +from numina.core.reciperesult import RecipeResultAutoQA as RecipeResult import logging import pkgutil
Alias numina's RecipeResultAutoQA as emir.core.RecipeResult
py
diff --git a/androlyze.py b/androlyze.py index <HASH>..<HASH> 100755 --- a/androlyze.py +++ b/androlyze.py @@ -39,7 +39,7 @@ from androguard.decompiler.decompiler import * from androguard.core import androconf from androguard.util import read -from IPython.frontend.terminal.embed import InteractiveShellEmbed +from IPython.terminal.embed import InteractiveShellEmbed from IPython.config.loader import Config from cPickle import dumps, loads
Update androlyze.py. Correct the IPython InteractiveShellEmbed import to avoid the warning: "The top-level `frontend` package has been deprecated. All its subpackages have been moved to the top `IPython` level."
py
diff --git a/pyinfra/facts/server.py b/pyinfra/facts/server.py index <HASH>..<HASH> 100644 --- a/pyinfra/facts/server.py +++ b/pyinfra/facts/server.py @@ -160,6 +160,7 @@ class LinuxDistribution(FactBase): _regexes = [ r'(Ubuntu) ([0-9]{2})\.([0-9]{2})', r'(CentOS) release ([0-9]).([0-9])', + r'(Red Hat Enterprise Linux) Server release ([0-9]).([0-9])', r'(CentOS) Linux release ([0-9])\.([0-9])', r'(Debian) GNU/Linux ([0-9])()', r'(Gentoo) Base System release ([0-9])\.([0-9])',
+Support for RHEL on `linux_distribution` fact.
py
diff --git a/telemetry/telemetry/util/global_hooks.py b/telemetry/telemetry/util/global_hooks.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/util/global_hooks.py +++ b/telemetry/telemetry/util/global_hooks.py @@ -83,7 +83,7 @@ def InstallAtExitHook(): hangs on the builbots. """ # TODO(tonyg): Find a way to do something similar on Windows. - if platform.GetHostPlatform().GetOSName() not in ['linux', 'mac']: + if platform.GetHostPlatform().GetOSName() == 'win': return # Create new process group and become its leader.
Revert <I> "[Telemetry] Don't setpgrp on cros." Planning to revert a CL that this modified. BUG=<I> > [Telemetry] Don't setpgrp on cros. > > It doesn't work there for some reason. > > BUG= > > Review URL: <URL>
py
diff --git a/runtests.py b/runtests.py index <HASH>..<HASH> 100644 --- a/runtests.py +++ b/runtests.py @@ -7,6 +7,7 @@ import django DEFAULT_SETTINGS = { 'INSTALLED_APPS': ( + 'django.contrib.gis', 'spillway', 'tests', ),
Enable contrib.gis for test runs so templates are found
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ README = open('README.rst', 'rt').read() setup( name='vncdotool', - version='0.2.0dev', + version='0.2.0', description='Command line VNC client', install_requires=[ 'Twisted',
Preparing release <I>
py
diff --git a/eventsourcing/system.py b/eventsourcing/system.py index <HASH>..<HASH> 100644 --- a/eventsourcing/system.py +++ b/eventsourcing/system.py @@ -95,7 +95,7 @@ class Follower(Application): """ reader, mapper = self.readers[name] start = self.recorder.max_tracking_id(name) + 1 - for notification in reader.read(start=start): + for notification in reader.select(start=start): domain_event = mapper.to_domain_event(notification) process_event = ProcessEvent( Tracking(
Changed Follower to select() notifications rather than read() them.
py
diff --git a/shap/maskers/_fixed_composite.py b/shap/maskers/_fixed_composite.py index <HASH>..<HASH> 100644 --- a/shap/maskers/_fixed_composite.py +++ b/shap/maskers/_fixed_composite.py @@ -30,4 +30,10 @@ class FixedComposite(Masker): if hasattr(self.masker, "mask_shapes") and callable(self.masker.mask_shapes): return self.masker.mask_shapes(*args) else: - return [a.shape for a in args] \ No newline at end of file + return [a.shape for a in args] + + def feature_names(self, *args): + if callable(getattr(self.masker, "feature_names", None)): + return self.masker.feature_names(*args) + else: + return None \ No newline at end of file
Added feature_names method to FixedComposite masker
py
diff --git a/jsonfield/fields.py b/jsonfield/fields.py index <HASH>..<HASH> 100644 --- a/jsonfield/fields.py +++ b/jsonfield/fields.py @@ -55,15 +55,16 @@ class JSONField(models.TextField): def value_from_object(self, obj): return json.dumps(super(JSONField, self).value_from_object(obj)) - def formfield(self, form_class=JSONFormField, **kwargs): - defaults = {"help_text": "Enter valid JSON"} + def formfield(self, **kwargs): - if getattr(self, "blank", False): - defaults["required"] = False + kwargs["form_class"] = JSONFormField - defaults.update(kwargs) + field = super(JSONField, self).formfield(**kwargs) - return form_class(**defaults) + if not field.help_text: + field.help_text = "Enter valid JSON" + + return field try: from south.modelsinspector import add_introspection_rules
Changed formfield to use the default implementation. Changed help_text to only set the default if not already set.
py
diff --git a/rdbtools/callbacks.py b/rdbtools/callbacks.py index <HASH>..<HASH> 100644 --- a/rdbtools/callbacks.py +++ b/rdbtools/callbacks.py @@ -364,7 +364,8 @@ class MemoryCallback(RdbCallback): def zadd(self, key, score, member): if self._current_encoding == 'skiplist': - self._current_size += self.sizeof_string(value) + self._current_size += 8 # self.sizeof_string(score) + self._current_size += self.sizeof_string(member) self._current_size += self.skiplist_entry_overhead() def end_sorted_set(self, key):
Fixing memory calculations for sorted set
py
diff --git a/compress/filters/css_url_replace/__init__.py b/compress/filters/css_url_replace/__init__.py index <HASH>..<HASH> 100644 --- a/compress/filters/css_url_replace/__init__.py +++ b/compress/filters/css_url_replace/__init__.py @@ -8,7 +8,7 @@ CSS_REPLACE = getattr(settings, 'COMPRESS_CSS_URL_REPLACE', []) class CSSURLReplace(FilterBase): def filter_css(self, css): for pattern, repl in CSS_REPLACE.iteritems(): - css = re.sub(pattern, repl, css, flags=re.UNICODE | re.IGNORECASE) + css = re.sub(pattern, repl, css) if self.verbose: print 'Replaced "%s" with "%s"' % (pattern, repl) return css
make it work on python <I>
py
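The commit simply drops the flags, but had they been needed, compiling the pattern is the Python 2.6-compatible way to keep them, since re.sub() only grew its flags argument in 2.7. A sketch under that assumption:

```python
import re

# re.sub(..., flags=...) is 2.7+; on 2.6, bake the flags into the pattern:
pattern = re.compile(r"url\(", re.UNICODE | re.IGNORECASE)
css = pattern.sub("URL(", "background: url(img.png);")
print(css)  # background: URL(img.png);
```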
diff --git a/rest/response.py b/rest/response.py index <HASH>..<HASH> 100644 --- a/rest/response.py +++ b/rest/response.py @@ -99,14 +99,17 @@ class RESTfulResponse(object): if isinstance(templ_or_func, str): def serialize(data): - data = data or {} - response = render_to_response(templ_or_func, data) + if data is not None: + context = { 'context': data } + response = render_to_response(templ_or_func, context) + else: + response = HttpResponse() response['Content-Type'] = content_type response.status_code = status return response else: def serialize(data): - if data: + if data is not None: response = HttpResponse(templ_or_func(data), content_type=content_type, status=status) else: response = HttpResponse(content_type=content_type, status=status)
Updated RESTfulResponse to set all data to the variable 'context' in a template If a template is used during content negotiation for transforming data into the desired representation, a variable named 'context' that holds all of the data returned by the resource is available from within the template. Also, if no data is returned, rendering the template is skipped. This matches the current behavior when a callable is used in place of a template. Currently the callable is only called when data is available.
py
diff --git a/test/test_task.py b/test/test_task.py index <HASH>..<HASH> 100644 --- a/test/test_task.py +++ b/test/test_task.py @@ -918,7 +918,7 @@ with open(_input, 'r') as inf, open(_output, 'w') as outf: Base_Executor(wf, args=['--g', str(val)], config={ 'config_file': '~/docker.yml', - 'default_queue': 'ts', + 'default_queue': 'docker', 'sig_mode': 'force', }).run() # now check if @@ -961,7 +961,7 @@ with open(_input, 'r') as inf, open(_output, 'w') as outf: Base_Executor(wf, args=['--g', str(val)], config={ 'config_file': '~/docker.yml', - 'default_queue': 'ts', + 'default_queue': 'docker', 'sig_mode': 'force', }).run() # now check if
Stop using pbs queue for task tests
py
diff --git a/autofit/graphical/mean_field.py b/autofit/graphical/mean_field.py index <HASH>..<HASH> 100755 --- a/autofit/graphical/mean_field.py +++ b/autofit/graphical/mean_field.py @@ -296,7 +296,7 @@ class MeanField(CollectionPriorModel, Dict[Variable, AbstractMessage], Factor): {k: m ** other for k, m in self.items()}, self.log_norm * other ) return type(self)( - {key: self[key] ** value for key, value in other.items()}, + {key: value ** other[key] for key, value in self.items()}, self.log_norm * other.log_norm, )
Assume self is a subset of other for division
py
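The direction of iteration is the whole fix: looping over self.items() and indexing into other only needs self's keys to be a subset of other's, which is the assumption the message states. The same point at plain-dict level:

self_d = {'a': 2.0, 'b': 3.0}            # keys form a subset of other_d's
other_d = {'a': 2.0, 'b': 1.0, 'c': 5.0}

# old form iterated other_d and would raise KeyError on 'c';
# new form iterates self_d, so other_d's extra keys are simply ignored
result = {k: v ** other_d[k] for k, v in self_d.items()}
print(result)  # {'a': 4.0, 'b': 3.0}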
diff --git a/pymysql/connections.py b/pymysql/connections.py index <HASH>..<HASH> 100644 --- a/pymysql/connections.py +++ b/pymysql/connections.py @@ -640,9 +640,18 @@ class Connection(object): self._rfile = None sock.close() + @property + def open(self): + return self.socket is not None + def __del__(self): if self.socket: - self.close() + try: + self.socket.close() + except: + pass + self.socket = None + self._rfile = None def autocommit(self, value): self.autocommit_mode = bool(value)
Add connection.open property. `__del__` doesn't wait for the server.
py
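The rule the pymysql commit applies is general: __del__ can run at unpredictable times (including interpreter shutdown), so it should only do best-effort local cleanup and must never let an exception escape. A distilled sketch of the pattern:

class Resource(object):
    def __init__(self, sock):
        self.socket = sock

    @property
    def open(self):
        return self.socket is not None

    def __del__(self):
        # local cleanup only: no server round-trip, no exceptions escape
        if self.socket is not None:
            try:
                self.socket.close()
            except Exception:
                pass
            self.socket = None

Callers that want a clean protocol shutdown still call close() explicitly; the new open property lets them check the state first.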
diff --git a/demosys/effects/managers.py b/demosys/effects/managers.py index <HASH>..<HASH> 100644 --- a/demosys/effects/managers.py +++ b/demosys/effects/managers.py @@ -66,14 +66,14 @@ class SingleEffectManager(BaseEffectManger): """ effect_list = [cfg.cls() for name, cfg in effects.effects.items()] - # If an effect want't specified in the initializer we grab the first one - if not self.active_effect: - self.active_effect = effect_list[0] - else: - # Otherwise we look for an effect with the name + # If an effect was specified in the initializer, find it + if self.effect_module: for effect in effect_list: if effect.name == self.effect_module: self.active_effect = effect + else: + # Otherwise we look just grab the first effect + self.active_effect = effect_list[0] if not self.active_effect: print("Cannot find effect '{}'".format(self.active_effect)) @@ -90,7 +90,8 @@ class SingleEffectManager(BaseEffectManger): self.active_effect.draw(time, frametime, target) def key_event(self, key, scancode, action, mods): - print("SingleEffectManager:key_event", key, scancode, action, mods) + # print("SingleEffectManager:key_event", key, scancode, action, mods) + pass class TrackerEffectManager(BaseEffectManger):
Bug fix: SingleEffectManager should run the effect module supplied in the initializer
py
diff --git a/gubernator/github/classifier.py b/gubernator/github/classifier.py index <HASH>..<HASH> 100644 --- a/gubernator/github/classifier.py +++ b/gubernator/github/classifier.py @@ -282,6 +282,10 @@ def distill_events(events): continue if action == 'created': output.append(('comment', user, timestamp)) + if event == 'pull_request_review': + if action == 'submitted': + # this is morally equivalent to a comment + output.append(('comment', user, timestamp)) if event == 'pull_request': if action in ('opened', 'reopened', 'synchronize'): output.append(('push', user, timestamp))
Classify submitted PR reviews as comments for the state machine.
py
diff --git a/jsonrpc/site.py b/jsonrpc/site.py index <HASH>..<HASH> 100644 --- a/jsonrpc/site.py +++ b/jsonrpc/site.py @@ -162,7 +162,7 @@ class JSONRPCSite(object): if 'id' in D and D['id'] is not None: # regular request response['result'] = R response['id'] = D['id'] - if version == '1.1' and 'error' in response: + if version in ('1.1', '2.0') and 'error' in response: response.pop('error') elif is_batch: # notification, not ok in a batch format, but happened anyway raise InvalidRequestError @@ -174,7 +174,7 @@ class JSONRPCSite(object): except Error, e: signals.got_request_exception.send(sender=self.__class__, request=request) response['error'] = e.json_rpc_format - if version == '1.1' and 'result' in response: + if version in ('1.1', '2.0') and 'result' in response: response.pop('result') status = e.status except Exception, e: @@ -183,7 +183,7 @@ class JSONRPCSite(object): other_error = OtherError(e) response['error'] = other_error.json_rpc_format status = other_error.status - if version == '1.1' and 'result' in response: + if version in ('1.1', '2.0') and 'result' in response: response.pop('result') return response, status
Add a rule on the response (procedure return) according to the specification: 'Exactly one of result or error MUST be specified. It's not allowed to specify both or none.'
py
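The invariant being enforced — a JSON-RPC 1.1/2.0 response carries exactly one of result or error — can also be phrased as a single normalization step. A hypothetical helper equivalent to the three inline checks above:

def normalize(response, version):
    # keep exactly one of 'result'/'error' for JSON-RPC 1.1 and 2.0
    if version in ('1.1', '2.0'):
        if response.get('error') is not None:
            response.pop('result', None)
        elif 'result' in response:
            response.pop('error', None)
    return response

assert normalize({'result': 42, 'error': None, 'id': 1}, '2.0') == \
    {'result': 42, 'id': 1}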
diff --git a/asammdf/gui/widgets/file.py b/asammdf/gui/widgets/file.py index <HASH>..<HASH> 100644 --- a/asammdf/gui/widgets/file.py +++ b/asammdf/gui/widgets/file.py @@ -465,7 +465,7 @@ class FileWidget(WithMDIArea, Ui_file_widget, QtWidgets.QWidget): item = QtWidgets.QTreeWidgetItem() item.setText(0, "Raw size") - item.setText(1, f"{size / 1024 / 1024:.1f} MB") + item.setText(1, f"{size / 1024 / 1024:.1f} MB") if cycles: item.setForeground(1, QtGui.QBrush(QtCore.Qt.darkGreen)) channel_group_item.addChild(item) @@ -680,7 +680,7 @@ class FileWidget(WithMDIArea, Ui_file_widget, QtWidgets.QWidget): for i, group in enumerate(self.mdf.groups): entry = i, 0xFFFFFFFFFFFFFFFF channel_group = TreeItem(entry, mdf_uuid=self.uuid) - comment = extract_cncomment_xml(channel_group.comment) + comment = extract_cncomment_xml(group.channel_group.comment) if self.mdf.version >= "4.00" and group.channel_group.acq_source: source = group.channel_group.acq_source
Fix indentation and a missing group reference
py
diff --git a/beetle/base.py b/beetle/base.py index <HASH>..<HASH> 100644 --- a/beetle/base.py +++ b/beetle/base.py @@ -38,15 +38,13 @@ class Includer(object): def read(self, path, content): partial_path, extension = os.path.splitext(path) extension = extension.strip('.') + + suggested_path = remove_leading_folder(path) if extension in self.specific: - extension, content = self.specific[extension](path) - - partial_path = remove_leading_folder(partial_path) - destination = '{path}.{extension}'.format( - path=partial_path, - extension=extension, - ) - return destination, content + handler = self.specific[extension](content) + suggested_path, content = handler(content, suggested_path) + + return suggested_path, content def __init__(self, folders): self.include = folders['include']
Sending content and a suggested path to plugins. They can then manipulate both if needed.
py
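Under the new contract a handler receives the content plus a suggested destination and may rewrite either before returning (path, content). A toy handler showing just that signature — the registration machinery around self.specific is the project's own:

def uppercase_handler(content, suggested_path):
    # toy transform: change the extension and rewrite the body
    path = suggested_path.rsplit('.', 1)[0] + '.txt'
    return path, content.upper()

path, content = uppercase_handler('hello', 'notes.md')
assert (path, content) == ('notes.txt', 'HELLO')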
diff --git a/tools/exposure-converter.py b/tools/exposure-converter.py index <HASH>..<HASH> 100644 --- a/tools/exposure-converter.py +++ b/tools/exposure-converter.py @@ -6,7 +6,6 @@ proper <cost> tag """ import sys -import os from lxml import etree from openquake import nrmllib @@ -130,6 +129,14 @@ def convert(filename, output_filename): cost, "insuranceLimit", text(asset_element, "limit")) + if find(asset_element, "occupants") is not None: + occupancies = etree.SubElement(element, "occupancies") + for occ in asset_element.findall( + ".//{%s}occupants" % nrmllib.NAMESPACE): + new_occ = etree.SubElement(occupancies, "occupancy") + new_occ.set("occupants", occ.text) + new_occ.set("period", get(occ, "description")) + output.write(etree.tostring(root, pretty_print=True, xml_declaration=True,
Add support for occupancies in the converter
py
diff --git a/clam/clamservice.py b/clam/clamservice.py index <HASH>..<HASH> 100755 --- a/clam/clamservice.py +++ b/clam/clamservice.py @@ -1015,6 +1015,7 @@ class Project: @staticmethod def outputindex(project, user, d = '', quick=False): prefix = Project.path(project, user) + 'output/' + begintime = time.time() for f in glob.glob(prefix + d + "/*"): if os.path.basename(f)[0] != '.': #always skip all hidden files if os.path.isdir(f): @@ -1024,6 +1025,8 @@ class Project: file = clam.common.data.CLAMOutputFile(Project.path(project,user), f[len(prefix):], loadmetadata=not quick) file.attachviewers(settings.PROFILES) #attaches converters as well yield file + if not quick and time.time() - begintime >= settings.QUICKTIMEOUT: + quick = True @staticmethod def inputindexbytemplate(project, user, inputtemplate): @@ -2927,6 +2930,8 @@ def set_defaults(): settings.ENABLEWEBAPP = True if 'ENABLED' not in settingkeys: settings.ENABLED = True + if 'QUICKTIMEOUT' not in settingkeys: + settings.QUICKTIMEOUT = 90 #after loading output files for this many seconds, quick mode will be enabled and files will be loaded without metadata if 'REMOTEHOST' not in settingkeys: settings.REMOTEHOST = None elif 'REMOTEUSER' not in settingkeys:
Automatically enable quick mode after a certain number of seconds pass (QUICKTIMEOUT=<I> by default) #<I>
py
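The mechanism is a plain time budget on a generator: once the wall-clock budget is spent, remaining items are loaded in the cheaper mode. A standalone sketch (the 90-second default mirrors the fallback set in the diff):

import time

def index(items, load_full, load_quick, budget=90):
    start = time.time()
    quick = False
    for item in items:
        yield (load_quick if quick else load_full)(item)
        if not quick and time.time() - start >= budget:
            quick = True  # metadata loading is skipped from here on

print(list(index(['a', 'b'], str.upper, str.lower, budget=0)))  # ['A', 'b']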
diff --git a/master/docs/bbdocs/ext.py b/master/docs/bbdocs/ext.py index <HASH>..<HASH> 100644 --- a/master/docs/bbdocs/ext.py +++ b/master/docs/bbdocs/ext.py @@ -196,6 +196,7 @@ class BBDomain(Domain): 'event': ObjType('event', 'event'), 'rtype': ObjType('rtype', 'rtype'), 'rpath': ObjType('rpath', 'rpath'), + 'raction': ObjType('raction', 'raction'), } directives = {
docs: Fix definition of bb-raction index
py
diff --git a/fmn/rules/generic.py b/fmn/rules/generic.py index <HASH>..<HASH> 100644 --- a/fmn/rules/generic.py +++ b/fmn/rules/generic.py @@ -15,6 +15,26 @@ def user_filter(config, message, fasnick=None, *args, **kw): if fasnick: return fasnick in fedmsg.meta.msg2usernames(message, **config) +def not_user_filter(config, message, fasnick=None, *args, **kw): + """ All messages not concerning one or more users. + + Use this rule to exclude messages that are associated with one or more + users. Specify several users by separating them with a comma ','. + """ + + fasnick = kw.get('fasnick', fasnick) + if not fasnick: + return False + + fasnick = fasnick.split(',') + valid = False + for nick in fasnick: + if nick.strip() in fedmsg.meta.msg2usernames(message, **config): + valid = True + break + + return valid + def user_package_filter(config, message, fasnick=None, *args, **kw): """ All messages concerning user's packages
Add filter to exclude notifications about one or more users. Fixes <URL>
py
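The comma handling in the new rule amounts to a set-overlap test between the configured nicks and the message's usernames; a compact restatement (illustrative only — how the framework turns a match into an exclusion is outside this snippet):

def any_nick_matches(fasnick, usernames):
    nicks = {n.strip() for n in fasnick.split(',')}
    return bool(nicks & set(usernames))

assert any_nick_matches('bodhi, koji', ['koji', 'pingou'])
assert not any_nick_matches('bodhi', ['pingou'])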
diff --git a/telethon/client/telegrambaseclient.py b/telethon/client/telegrambaseclient.py index <HASH>..<HASH> 100644 --- a/telethon/client/telegrambaseclient.py +++ b/telethon/client/telegrambaseclient.py @@ -375,7 +375,13 @@ class TelegramBaseClient(abc.ABC): # able to close the pending tasks properly, and letting the script # complete without calling disconnect causes the script to trigger # 100% CPU load. Call disconnect to make sure it doesn't happen. - self.disconnect() + try: + self.disconnect() + except Exception: + # Arguably not the best solution, but worth trying if the user + # forgot to disconnect; normally this is fine but sometimes it + # can fail (https://github.com/LonamiWebs/Telethon/issues/1073) + pass async def _switch_dc(self, new_dc): """
Don't raise inside __del__ (#<I>)
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -48,7 +48,7 @@ except(IOError, ImportError): setup( name='esgfpid', - version='0.7.17', + version='0.7.18-dev', author='Merret Buurman, German Climate Computing Centre (DKRZ)', author_email='[email protected]', url='https://github.com/IS-ENES-Data/esgf-pid',
Increment version number to <I>-dev.
py
diff --git a/tests/test_directives.py b/tests/test_directives.py index <HASH>..<HASH> 100644 --- a/tests/test_directives.py +++ b/tests/test_directives.py @@ -106,6 +106,15 @@ def test_current_api(): hug.test.get(api, 'v3/version_call_tester').data +def test_named_directives(): + '''Ensure that it's possible to attach directives to named parameters''' + @hug.get() + def test(time:hug.directive('timer')=3): + return time + + assert isinstance(hug(), hug.directives.Timer) + + def test_per_api_directives(): '''Test to ensure it's easy to define a directive within an API''' @hug.directive(apply_globally=False)
Add test to define exactly how named directives should work
py
diff --git a/zipline/finance/risk.py b/zipline/finance/risk.py index <HASH>..<HASH> 100644 --- a/zipline/finance/risk.py +++ b/zipline/finance/risk.py @@ -27,7 +27,7 @@ Risk Report | alpha | The _algorithm_ alpha to the benchmark. | +-----------------+----------------------------------------------------+ | excess_return | The excess return of the algorithm over the | - | | benchmark. | + | | treasuries. | +-----------------+----------------------------------------------------+ | max_drawdown | The largest relative peak to relative trough move | | | for the portfolio returns between self.start_date |
Fixed a typo in the documentation.
py
diff --git a/anytemplate/engines/jinja2.py b/anytemplate/engines/jinja2.py index <HASH>..<HASH> 100644 --- a/anytemplate/engines/jinja2.py +++ b/anytemplate/engines/jinja2.py @@ -86,7 +86,10 @@ if SUPPORTED: >>> s = renders('a = {{ a }}, b = "{{ b }}"', {'a': 1, 'b': 'bbb'}) >>> assert s == 'a = 1, b = "bbb"' """ - return get_env(paths).from_string(tmpl_s).render(**ctx) + try: + return get_env(paths).from_string(tmpl_s).render(**ctx) + except jinja2.exceptions.TemplateNotFound as e: + raise anytemplate.engines.base.TemplateNotFound(str(e)) def render(filepath, ctx, paths=None): """
Try to catch cases where any of the included templates are missing
py
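Translating the backend's exception into the library's own type at the API boundary keeps callers independent of which engine is installed. The shape of the pattern, with stand-in exception classes:

class TemplateNotFound(Exception):
    pass  # library-level error, so callers never import the backend directly

class BackendError(Exception):
    pass

def render_guarded(do_render):
    try:
        return do_render()
    except BackendError as e:
        raise TemplateNotFound(str(e))

def fail():
    raise BackendError('missing.j2')

try:
    render_guarded(fail)
except TemplateNotFound as e:
    print('caught: %s' % e)  # caught: missing.j2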
diff --git a/wandb/keras.py b/wandb/keras.py index <HASH>..<HASH> 100644 --- a/wandb/keras.py +++ b/wandb/keras.py @@ -1,4 +1,4 @@ -import numpy as np +import operator import os from wandb import history @@ -46,18 +46,18 @@ class WandBKerasCallback(object): mode = 'auto' if mode == 'min': - self.monitor_op = np.less - self.best = np.Inf + self.monitor_op = operator.lt + self.best = float('inf') elif mode == 'max': - self.monitor_op = np.greater - self.best = -np.Inf + self.monitor_op = operator.gt + self.best = float('-inf') else: if 'acc' in self.monitor or self.monitor.startswith('fmeasure'): - self.monitor_op = np.greater - self.best = -np.Inf + self.monitor_op = operator.gt + self.best = float('-inf') else: - self.monitor_op = np.less - self.best = np.Inf + self.monitor_op = operator.lt + self.best = float('inf') def set_params(self, params): self.params = params
Get rid of numpy dependency.
py
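The substitutions above are one-for-one on scalars: np.less/np.greater behave like operator.lt/operator.gt, and np.Inf/-np.Inf are float('inf')/float('-inf'). A quick check of the mapping:

import operator

monitor_op, best = operator.lt, float('inf')    # 'min' mode
assert monitor_op(0.3, best)    # any finite loss improves on +inf

monitor_op, best = operator.gt, float('-inf')   # 'max' mode
assert monitor_op(0.9, best)    # any finite accuracy improves on -inf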