Columns: diff (string, 139–3.65k chars) · message (string, 8–627 chars) · diff_languages (1 class)
diff --git a/myql/utils.py b/myql/utils.py
index <HASH>..<HASH> 100644
--- a/myql/utils.py
+++ b/myql/utils.py
@@ -3,11 +3,11 @@ from xml.dom import minidom
 from xml.etree import cElementTree as tree
 
 def pretty_json(data):
-    data = json.loads(data)
+    data = json.loads(data.decode('utf-8'))
     return json.dumps(data, indent=4, sort_keys=True)
 
 def pretty_xml(data):
-    parsed_string = minidom.parseString(data)
+    parsed_string = minidom.parseString(data.decode('utf-8'))
     return parsed_string.toprettyxml(indent='\t', encoding='utf-8')
 
 def prettyfy(response, format='json'):
Add a decode() call for each response.content
py
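For context, a minimal standalone sketch (the byte payload is hypothetical) of why the decode is needed: on Python 3 versions before 3.6, json.loads() rejects bytes, so HTTP response content has to be decoded before parsing.

import json

# response.content arrives as bytes; older Python 3 json.loads()
# rejects bytes, so decode explicitly before parsing.
raw = b'{"b": 2, "a": 1}'
data = json.loads(raw.decode('utf-8'))
print(json.dumps(data, indent=4, sort_keys=True))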
diff --git a/graphene/contrib/django/types.py b/graphene/contrib/django/types.py
index <HASH>..<HASH> 100644
--- a/graphene/contrib/django/types.py
+++ b/graphene/contrib/django/types.py
@@ -26,7 +26,7 @@ class DjangoObjectTypeMeta(ObjectTypeMeta):
             return
         only_fields = cls._meta.only_fields
         reverse_fields = tuple(get_reverse_fields(cls._meta.model))
-        all_fields = (list(cls._meta.model._meta.local_fields) +
+        all_fields = (list(cls._meta.model._meta.fields) +
                       list(reverse_fields) +
                       list(cls._meta.model._meta.local_many_to_many))
Replace local_fields with fields when extending from a non-abstract Django model
py
diff --git a/tests/test_decorators.py b/tests/test_decorators.py
index <HASH>..<HASH> 100644
--- a/tests/test_decorators.py
+++ b/tests/test_decorators.py
@@ -1311,3 +1311,14 @@ def test_output_format_inclusion(hug_api):
     hug_api.extend(api, '')
     assert hug.test.get(hug_api, 'my_endpoint').data == {'mutated': 'hello'}
+
+
+def test_api_pass_along(hug_api):
+    """Test to ensure the correct API instance is passed along using API directive"""
+    @hug.get()
+    def takes_api(hug_api):
+        return hug_api.__name__
+
+    hug_api.__name__ = "Test API"
+    hug_api.extend(api, '')
+    assert hug.test.get(hug_api, 'takes_api').data == hug_api.__name__
Add test to ensure issue #<I> is resolved with the latest set of changes
py
diff --git a/sdk/formrecognizer/azure-ai-formrecognizer/setup.py b/sdk/formrecognizer/azure-ai-formrecognizer/setup.py
index <HASH>..<HASH> 100644
--- a/sdk/formrecognizer/azure-ai-formrecognizer/setup.py
+++ b/sdk/formrecognizer/azure-ai-formrecognizer/setup.py
@@ -44,6 +44,7 @@ setup(
     author='Microsoft Corporation',
     author_email='[email protected]',
     url='https://github.com/Azure/azure-sdk-for-python',
+    keywords="azure, form recognizer, cognitive services, document analyzer, document analysis, applied ai",
    classifiers=[
        "Development Status :: 4 - Beta",
        'Programming Language :: Python',
[formrecognizer] Adding keywords to improve searching for FR package (#<I>)

* adding keyword to improve searching for FR package
* add applied ai to terms
py
diff --git a/groupy/api/endpoint.py b/groupy/api/endpoint.py
index <HASH>..<HASH> 100644
--- a/groupy/api/endpoint.py
+++ b/groupy/api/endpoint.py
@@ -521,6 +521,29 @@ class Users(Endpoint):
         )
         return cls.response(r)
 
+    @classmethod
+    def update(cls, avatar_url=None, name=None, email=None, zip_code=None):
+        """Update the information for the user.
+
+        :param str avatar_url: the image URL for the user's avatar
+        :param str name: the new name of the user
+        :param str email: the new email of the user
+        :param str zip_code: the new Zip code of the user
+        :returns: the modified user
+        :rtype: :class:`dict`
+        """
+        r = requests.post(
+            cls.build_url('update'),
+            params={
+                'avatar_url': avatar_url,
+                'name': name,
+                'email': email,
+                'zip_code': zip_code
+            }
+        )
+        return cls.response(r)
+
+
 class Sms(Endpoint):
     """Endpoint for the SMS API.
Adds support for updating user info

Adds new method Users.update() that allows the modification of the user's avatar_url, name, email, and zip_code.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -44,7 +44,7 @@ if sys.version_info >= (3,):
 setup(
     name='ngram',
     description='A `set` subclass providing fuzzy search based on N-grams.',
-    version='3.3.0',
+    version='3.3.1',
     license='LGPLv3+',
     py_modules=['ngram'],
     zip_safe=True,
Change version number from <I> to <I>
py
diff --git a/appinst/platforms/win32.py b/appinst/platforms/win32.py
index <HASH>..<HASH> 100644
--- a/appinst/platforms/win32.py
+++ b/appinst/platforms/win32.py
@@ -156,3 +156,9 @@ class Win32(object):
             if os.path.isdir(rel_path):
                 shutil.rmtree(rel_path, True)
                 print 'Removed start menu at: %s' % rel_path
+
+        # Remove the top-level menu directory, but only if it's empty
+        try:
+            os.rmdir(os.path.join(start_menu, top_name))
+        except:
+            pass
Remove the top-level menu directory if empty.
py
diff --git a/tangelo/tangelo/server.py b/tangelo/tangelo/server.py
index <HASH>..<HASH> 100644
--- a/tangelo/tangelo/server.py
+++ b/tangelo/tangelo/server.py
@@ -430,6 +430,8 @@ class Tangelo(object):
             tangelo.http_status(501, "Error Importing Service")
             tangelo.content_type("application/json")
             result = tangelo.util.traceback_report(error="Could not import module %s" % (tangelo.request_path()))
+
+            tangelo.log_warning("SERVICE", "Could not import service module %s:\n%s" % (tangelo.request_path(), "\n".join(result["traceback"])))
         else:
             # Try to run the service - either it's in a function called
             # "run()", or else it's in a REST API consisting of at least one of
Reporting traceback when module *loading* fails, in addition to when module *execution* fails
py
diff --git a/libsubmit/providers/local/local.py b/libsubmit/providers/local/local.py
index <HASH>..<HASH> 100644
--- a/libsubmit/providers/local/local.py
+++ b/libsubmit/providers/local/local.py
@@ -95,9 +95,9 @@ class Local(ExecutionProvider):
 
         if channel is None:
             if channel_script_dir is None:
-                self.channel = LocalChannel(scriptDir=channel_script_dir)
-            else:
                 self.channel = LocalChannel()
+            else:
+                self.channel = LocalChannel(scriptDir=channel_script_dir)
         else:
             self.channel = channel
         self.config = config
Fix logic error pointed out by @yadudoc
py
diff --git a/geomdl/trimming.py b/geomdl/trimming.py
index <HASH>..<HASH> 100644
--- a/geomdl/trimming.py
+++ b/geomdl/trimming.py
@@ -49,8 +49,7 @@ def fix_multi_trim_curves(obj, **kwargs):
 
         # Directly add to the new trims array if the trim is a single curve
         if trim_size == 1:
-            # Also, get rid of the container if exists
-            new_trims.append(trim[0])
+            new_trims.append(trim)
             continue
 
         new_trim = []
Don't remove the curve from the container before fixing the sense
py
diff --git a/pipes/elb/create_elb.py b/pipes/elb/create_elb.py
index <HASH>..<HASH> 100644
--- a/pipes/elb/create_elb.py
+++ b/pipes/elb/create_elb.py
@@ -110,10 +110,10 @@ def main():
     health_proto, health_port_path = args.health_target.split(':')
     health_port, *health_path = health_port_path.split('/')
 
-    if len(health_path) > 0:
-        health_path = '/{0}'.format(health_path)
-    else:
+    if not health_path:
         health_path = '/healthcheck'
+    else:
+        health_path = '/{0}'.format('/'.join(health_path))
 
     LOG.info('Health Check\n\tprotocol: %s\n\tport: %s\n\tpath: %s',
              health_proto, health_port, health_path)
fix: Format Health Check path with slashes See also: PSOBAT-<I>
py
diff --git a/tensorflow_probability/python/distributions/platform_compatibility_test.py b/tensorflow_probability/python/distributions/platform_compatibility_test.py
index <HASH>..<HASH> 100644
--- a/tensorflow_probability/python/distributions/platform_compatibility_test.py
+++ b/tensorflow_probability/python/distributions/platform_compatibility_test.py
@@ -131,6 +131,7 @@ VECTORIZED_LOGPROB_ATOL.update({
     'BetaQuotient': 2e-5,
     'CholeskyLKJ': 1e-4,
     'GammaGamma': 2e-5,
+    'JohnsonSU': 2e-6,
     'LKJ': 1e-3,
     'PowerSpherical': 2e-5,
 })
@@ -139,6 +140,7 @@ VECTORIZED_LOGPROB_RTOL = collections.defaultdict(lambda: 1e-6)
 VECTORIZED_LOGPROB_RTOL.update({
     'Beta': 1e-5,
     'GammaGamma': 1e-4,
+    'JohnsonSU': 1e-5,
     'NegativeBinomial': 1e-5,
     'PERT': 1e-5,
     'PowerSpherical': 5e-5,
Raise numerical tolerance for JohnsonSU agreement with vmap.

PiperOrigin-RevId: <I>
py
diff --git a/dataviews/__init__.py b/dataviews/__init__.py
index <HASH>..<HASH> 100644
--- a/dataviews/__init__.py
+++ b/dataviews/__init__.py
@@ -4,6 +4,10 @@ import sys, os
 cwd = os.path.abspath(os.path.split(__file__)[0])
 sys.path.insert(0, os.path.join(cwd, '..', 'param'))
 
+import param
+
+__version__ = param.Version(release=(0,7), fpath=__file__)
+
 from .views import *       # pyflakes:ignore (API import)
 from .dataviews import *   # pyflakes:ignore (API import)
 from .sheetviews import *  # pyflakes:ignore (API import)
Set __version__ using param.Version (commit tagged as '<I>')
py
diff --git a/pyani/scripts/parsers/report_parser.py b/pyani/scripts/parsers/report_parser.py
index <HASH>..<HASH> 100644
--- a/pyani/scripts/parsers/report_parser.py
+++ b/pyani/scripts/parsers/report_parser.py
@@ -108,7 +108,7 @@ def build(
         "--run_results",
         action="store",
         dest="run_results",
-        metavar="RUN_ID",
+        metavar="RUN_IDS",
         default=False,
         help="Report table of results for comma separated list of runs",
     )
@@ -116,7 +116,7 @@ def build(
         "--run_matrices",
         action="store",
         dest="run_matrices",
-        metavar="RUN_ID",
+        metavar="RUN_IDS",
         default=False,
         help="Report matrices of results for comma separated list of runs",
     )
Use plural metavar, RUN_ID -> RUN_IDS Suggestion from code review
py
diff --git a/addok/core.py b/addok/core.py
index <HASH>..<HASH> 100644
--- a/addok/core.py
+++ b/addok/core.py
@@ -65,6 +65,8 @@ class Result(object):
     def __init__(self, _id):
         doc = DB.hgetall(_id)
         for key, value in doc.items():
+            if key.startswith(b'h|'):
+                continue
             setattr(self, key.decode(), value.decode())
         self.score = float(self.importance)
Do not create a Result attr for each housenumber
py
diff --git a/src/sos/preview.py b/src/sos/preview.py
index <HASH>..<HASH> 100644
--- a/src/sos/preview.py
+++ b/src/sos/preview.py
@@ -69,8 +69,11 @@ def preview_img(filename, kernel=None, style=None):
         try:
             from wand.image import Image
             img = Image(filename=filename)
-            return { 'image/' + image_type: image_data,
-                'image/png': base64.b64encode(img._repr_png_()).decode('ascii') }
+            if image_type == 'gif':
+                return { 'image/' + image_type: image_data}
+            else:
+                return { 'image/' + image_type: image_data,
+                    'image/png': base64.b64encode(img._repr_png_()).decode('ascii') }
         except Exception:
             return { 'image/' + image_type: image_data }
     else:
@@ -261,7 +264,7 @@ def preview_dot(filename, kernel=None, style=None):
             if 'image/gif' in result:
                 #return {'image/gif': result['image/gif'],
                 #    'image/png': result['image/png']}
-                return {'image/gif': result[image/fig]}
+                return {'image/gif': result['image/gif']}
             else:
                 return result
         except Exception as e:
fix preview for .gif and a bug for .dot with multi outputs #<I>
py
diff --git a/master/setup.py b/master/setup.py
index <HASH>..<HASH> 100755
--- a/master/setup.py
+++ b/master/setup.py
@@ -369,7 +369,7 @@ else:
     # buildbot depends on sqlalchemy internals, and this is the tested
    # version.
     'sqlalchemy-migrate==0.7.2',
-    'python-dateutil==1.5',
+    'python-dateutil>=1.5',
 ]
 
 setup_args['extras_require'] = {
relax requirement for python-dateutil
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -32,6 +32,7 @@ except ImportError:
 
 with open('oct2py/__init__.py', 'rb') as fid:
     for line in fid:
+        line = line.decode('utf-8')
         if line.startswith('__version__'):
             version = line.strip().split()[-1][1:-1]
             break
Fix setup.py in Py3K
py
diff --git a/dataviews/plots.py b/dataviews/plots.py
index <HASH>..<HASH> 100644
--- a/dataviews/plots.py
+++ b/dataviews/plots.py
@@ -794,7 +794,7 @@ class DataGridPlot(Plot):
         for coord in self.grid.keys():
             view = self.grid.get(coord, None)
             if view is not None:
-                subax = plt.subplot(self._gridspec[r, c])
+                subax = plt.subplot(self._gridspec[c, r])
                 vtype = view.type if isinstance(view, DataStack) else view.__class__
                 subplot = viewmap[vtype](view, show_axes=self.show_axes,
                                          show_legend=self.show_legend,
Fixed DataGrid indexing bug causing grids to be rotated
py
diff --git a/flask_rest_jsonapi/utils.py b/flask_rest_jsonapi/utils.py
index <HASH>..<HASH> 100644
--- a/flask_rest_jsonapi/utils.py
+++ b/flask_rest_jsonapi/utils.py
@@ -1,6 +1,7 @@
 # -*- coding: utf-8 -*-
 
 import json
+from uuid import UUID
 from datetime import datetime
 
 
@@ -8,4 +9,6 @@ class JSONEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, datetime):
             return obj.isoformat()
+        elif isinstance(obj, UUID):
+            return str(obj)
         return json.JSONEncoder.default(self, obj)
Handle UUID objects in the JSON encoder
py
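A self-contained sketch of the patched encoder's behavior (the sample payload is invented; the class body mirrors the diff):

import json
from uuid import UUID, uuid4

class JSONEncoder(json.JSONEncoder):
    def default(self, obj):
        # UUIDs are not JSON-serializable by default; emit the
        # canonical string form instead.
        if isinstance(obj, UUID):
            return str(obj)
        return json.JSONEncoder.default(self, obj)

print(json.dumps({'id': uuid4()}, cls=JSONEncoder))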
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,8 +24,7 @@ def find_version(*file_paths):
         return version_match.group(1)
     raise RuntimeError("Unable to find version string.")
 
-requirements_file = "requirements.txt"
-requirements = [pkg.strip() for pkg in open(requirements_file).readlines()]
+install_requires = ['requests', 'ratelim', 'click']
 
 try:
     import pypandoc
@@ -53,7 +52,7 @@ setup(
     package_data={'': ['LICENSE', 'README.md']},
     package_dir={'geocoder': 'geocoder'},
     include_package_data=True,
-    install_requires=requirements,
+    install_requires=install_requires,
     zip_safe=False,
     keywords='geocoder arcgis tomtom opencage google bing here',
     classifiers=(
Correct setup file for Python 3
py
diff --git a/tests/test_decorators.py b/tests/test_decorators.py
index <HASH>..<HASH> 100644
--- a/tests/test_decorators.py
+++ b/tests/test_decorators.py
@@ -187,3 +187,16 @@ def test_json_auto_convert():
         return body
 
     assert hug.test.get(api, 'test_json_body_stream_only', body=['value1', 'value2']).data == None
+
+def test_output_format():
+    @hug.default_output_format()
+    def augmented(data):
+        return hug.output_format.json(['Augmented', data])
+
+    @hug.get()
+    def hello():
+        return "world"
+
+    assert hug.test.get(api, 'hello').data == ['Augmented', 'world']
+
+
Add test to define how I would like output formats to work
py
diff --git a/pifacecommon/core.py b/pifacecommon/core.py
index <HASH>..<HASH> 100644
--- a/pifacecommon/core.py
+++ b/pifacecommon/core.py
@@ -49,8 +49,8 @@
 INTPOL_HIGH = 0x02  # interupt polarity
 INTPOL_LOW = 0x00
 
 SPIDEV = '/dev/spidev'
-SPI_HELP_LINK = \
-    "https://github.com/piface/pifacecommon#1-enable-the-spi-module"
+SPI_HELP_LINK = "http://piface.github.io/pifacecommon/installation.html" \
+    "#enable-the-spi-module"
 
 spidev_fd = None
updated spi help link to point to the new docs
py
diff --git a/sitetree/admin.py b/sitetree/admin.py
index <HASH>..<HASH> 100644
--- a/sitetree/admin.py
+++ b/sitetree/admin.py
@@ -3,7 +3,7 @@ from django import VERSION as django_version
 try:
     from django.urls import get_urlconf, get_resolver
 except ImportError:
-    from django.conf.urls import get_urlconf, get_resolver
+    from django.core.urlresolvers import get_urlconf, get_resolver
 from django.utils.translation import ugettext_lazy as _
 from django.utils import six
 from django.http import HttpResponseRedirect
Fixed my error: replaced django.conf.urls with django.core.urlresolvers.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -56,6 +56,12 @@ setup(
     license="Apache 2.0",
     keywords="google oauth 2.0 http client",
     classifiers=[
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.6',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.3',
+        'Programming Language :: Python :: 3.4',
         'Development Status :: 5 - Production/Stable',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: Apache Software License',
Add classifiers for Python 2 and 3.
py
diff --git a/python/mxnet/gluon/data/vision/datasets.py b/python/mxnet/gluon/data/vision/datasets.py
index <HASH>..<HASH> 100644
--- a/python/mxnet/gluon/data/vision/datasets.py
+++ b/python/mxnet/gluon/data/vision/datasets.py
@@ -128,7 +128,7 @@ class FashionMNIST(MNIST):
 class CIFAR10(dataset._DownloadedDataset):
     """CIFAR10 image classification dataset from https://www.cs.toronto.edu/~kriz/cifar.html
 
-    Each sample is an image (in 3D NDArray) with shape (32, 32, 1).
+    Each sample is an image (in 3D NDArray) with shape (32, 32, 3).
 
     Parameters
     ----------
@@ -190,7 +190,7 @@ class CIFAR10(dataset._DownloadedDataset):
 class CIFAR100(CIFAR10):
     """CIFAR100 image classification dataset from https://www.cs.toronto.edu/~kriz/cifar.html
 
-    Each sample is an image (in 3D NDArray) with shape (32, 32, 1).
+    Each sample is an image (in 3D NDArray) with shape (32, 32, 3).
 
     Parameters
     ----------
Correct shapes of images in cifar<I> and cifar<I> (#<I>)

* Correct shapes of images in cifar<I> and cifar<I>

  cifar<I> and cifar<I> have 3 channels

* Retrigger build
py
diff --git a/tests/test_filelist.py b/tests/test_filelist.py
index <HASH>..<HASH> 100644
--- a/tests/test_filelist.py
+++ b/tests/test_filelist.py
@@ -6,7 +6,9 @@ from distutils import debug
 from distutils.log import WARN
 from distutils.errors import DistutilsTemplateError
 from distutils.filelist import glob_to_re, translate_pattern, FileList
+from distutils import filelist
 
+import test.support
 from test.support import captured_stdout
 from distutils.tests import support
 
@@ -292,5 +294,13 @@ class FileListTestCase(support.LoggingSilencer,
         self.assertWarnings()
 
 
+class FindAllTestCase(unittest.TestCase):
+    @test.support.skip_unless_symlink
+    def test_missing_symlink(self):
+        with test.support.temp_cwd():
+            os.symlink('foo', 'bar')
+            self.assertEqual(filelist.findall(), [])
+
+
 if __name__ == "__main__":
     unittest.main()
Issue #<I>: Add test capturing failure.
py
diff --git a/straight/plugin/loaders.py b/straight/plugin/loaders.py
index <HASH>..<HASH> 100644
--- a/straight/plugin/loaders.py
+++ b/straight/plugin/loaders.py
@@ -59,10 +59,7 @@ class ModuleLoader(Loader):
 
     def _isPackage(self, path):
         pkg_init = os.path.join(path, '__init__.py')
-        if os.path.exists(pkg_init):
-            return True
-
-        return False
+        return os.path.exists(pkg_init)
 
     def _findPluginFilePaths(self, namespace):
         already_seen = set()
Update loaders.py

Small readability fix. ;)
py
diff --git a/aiomanhole/__init__.py b/aiomanhole/__init__.py
index <HASH>..<HASH> 100644
--- a/aiomanhole/__init__.py
+++ b/aiomanhole/__init__.py
@@ -186,6 +186,7 @@ class InteractiveInterpreter:
             try:
                 yield from self.handle_one_command()
             except ConnectionResetError:
+                writer.close()
                 break
             except Exception as e:
                 traceback.print_exc()
Handle nmap-ncat putting the socket into a half closed state on ^D.

nmap-ncat correctly waits on the remote to close the connection, whereas openbsd-netcat and gnu-netcat both read the ^D from stdin and terminate the process, killing the socket by side effect.
py
diff --git a/lib/reda/utils/fix_sign_with_K.py b/lib/reda/utils/fix_sign_with_K.py
index <HASH>..<HASH> 100644
--- a/lib/reda/utils/fix_sign_with_K.py
+++ b/lib/reda/utils/fix_sign_with_K.py
@@ -37,7 +37,7 @@ def fix_sign_with_K(dataframe):
     indices_negative = (dataframe['K'] < 0) & (dataframe['r'] < 0)
     if np.where(indices_negative)[0].size == 0:
         # nothing to do here
-        return
+        return dataframe
 
     dataframe.ix[indices_negative, ['K', 'r']] *= -1
also return dataframe when no changes are required
py
diff --git a/blockstack_client/constants.py b/blockstack_client/constants.py
index <HASH>..<HASH> 100644
--- a/blockstack_client/constants.py
+++ b/blockstack_client/constants.py
@@ -294,11 +294,14 @@ BLOCKSTACK_BURN_ADDRESS = virtualchain.hex_hash160_to_address(BLOCKSTACK_BURN_PU
 # never changes, so safe to duplicate to avoid gratuitous imports
 MAXIMUM_NAMES_PER_ADDRESS = 25
 
-MAX_RPC_LEN = 1024 * 1024 * 1024
-
 RPC_MAX_ZONEFILE_LEN = 4096     # 4KB
 RPC_MAX_PROFILE_LEN = 1024000   # 1MB
 
+MAX_RPC_LEN = RPC_MAX_ZONEFILE_LEN * 110    # maximum blockstackd RPC length--100 zonefiles with overhead
+if os.environ.get("BLOCKSTACK_TEST_MAX_RPC_LEN"):
+    MAX_RPC_LEN = int(os.environ.get("BLOCKSTACK_TEST_MAX_RPC_LEN"))
+    print("Overriding MAX_RPC_LEN to {}".format(MAX_RPC_LEN))
+
 CONFIG_FILENAME = 'client.ini'
 WALLET_FILENAME = 'wallet.json'
Allow the maximum RPC size to be overridden by the test framework
py
diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py
index <HASH>..<HASH> 100644
--- a/salt/utils/cloud.py
+++ b/salt/utils/cloud.py
@@ -19,6 +19,16 @@ import types
 import re
 import warnings
 
+# Let's import pwd and catch the ImportError. We'll raise it if this is not
+# Windows
+try:
+    import pwd
+except ImportError:
+    if not sys.platform.lower().startswith('win'):
+        # We can use salt.utils.is_windows() a little down because that will
+        # cause issues under windows at install time.
+        raise
+
 # Get logging started
 log = logging.getLogger(__name__)
 
@@ -30,13 +40,6 @@ import salt.utils
 import salt.utils.event
 from salt.utils.nb_popen import NonBlockingPopen
 
-# Let's import pwd after salt.utils to check for windows platform
-try:
-    import pwd
-except ImportError:
-    if not salt.utils.is_windows():
-        raise
-
 # Import salt cloud libs
 import salt.cloud
 from salt.cloud.exceptions import (
Don't rely on `salt.utils.is_windows()`; it fails at install time under Windows.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,8 +30,8 @@ setup_args.update(dict(
               "holoviews.operation",
               "holoviews.plotting",
               "holoviews.plotting.mpl"],
-    package_data={'holoviews.plotting.mpl': ['*.mplstyle'],
-                  'holoviews.ipython': ['*.jinja']},
+    package_data={'holoviews.plotting.mpl': ['*.mplstyle', '*.jinja'],
+                  'holoviews.plotting': ['*.jinja']},
    classifiers = [
        "License :: OSI Approved :: BSD License",
        "Development Status :: 5 - Production/Stable",
Updated setup.py with new jinja template locations
py
diff --git a/LiSE/setup.py b/LiSE/setup.py
index <HASH>..<HASH> 100644
--- a/LiSE/setup.py
+++ b/LiSE/setup.py
@@ -25,6 +25,6 @@ setup(
         'LiSE': ['sqlite.json']
     },
     install_requires=[
-        "gorm>=0.7.7",
+        "gorm>=0.7.8",
     ],
 )
require a gorm that can really retrieve node values
py
diff --git a/ailment/statement.py b/ailment/statement.py
index <HASH>..<HASH> 100644
--- a/ailment/statement.py
+++ b/ailment/statement.py
@@ -102,7 +102,7 @@ class Store(Statement):
             return "STORE(addr=%s, data=%s, size=%s, endness=%s, guard=%s)" % (self.addr, str(self.data),
                                                                                self.size, self.endness, self.guard)
         else:
-            return "%s =%s %s<%d>%s" % (self.variable.name, self.endness[0], str(self.data), self.size,
+            return "%s =%s %s<%d>%s" % (self.variable.name, "L" if self.endness == "Iend_LE" else "B", str(self.data), self.size,
                                         "" if self.guard is None else "[%s]" % self.guard)
 
     def replace(self, old_expr, new_expr):
Store: Fix endness representation in __repr__().
py
diff --git a/hangups/client.py b/hangups/client.py
index <HASH>..<HASH> 100644
--- a/hangups/client.py
+++ b/hangups/client.py
@@ -442,7 +442,8 @@ class Client(object):
                 # found via the other methods.
                 # TODO We should note who these users are and try to request
                 # them.
-                if len(p) > 1:
+                # for some contats, p[1] is None??
+                if len(p) > 1 and p[1]:
                     display_name = p[1]
                     self.initial_users[user_id] = User(
                         id_=user_id, first_name=display_name.split()[0],
only show contacts with display names; fixes #5

Some contacts have `None` for their display name; this commit addresses that by only showing contacts with truthy display name values. It looks like this contact code is temporary, but at least this fixes the issue for now.
py
diff --git a/empymod/transform.py b/empymod/transform.py
index <HASH>..<HASH> 100644
--- a/empymod/transform.py
+++ b/empymod/transform.py
@@ -321,7 +321,7 @@ def hqwe(zsrc, zrec, lsrc, lrec, off, angle, depth, ab, etaH, etaV, zetaH,
     # New lambda, from min to max required lambda with pts_per_dec
     start = np.log(lambd.min())
     stop = np.log(lambd.max())
-    ilambd = np.logspace(start, stop, (stop-start)*pts_per_dec + 1, 10)
+    ilambd = np.logspace(start, stop, (stop-start)*pts_per_dec + 1)
 
     # Call the kernel
     PJ0, PJ1, PJ0b = kernel.wavenumber(zsrc, zrec, lsrc, lrec, depth,
Cleaned up logspace-call in hqwe
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -57,7 +57,7 @@ def get_setup_version(reponame):
     Helper to get the current version from either git describe or the
     .version file (if available).
     """
-    import json, importlib
+    import json
     basepath = os.path.split(__file__)[0]
     version_file_path = os.path.join(basepath, reponame, '.version')
     try:
Removed unused import in setup.py
py
diff --git a/mythril/analysis/modules/delegatecall.py b/mythril/analysis/modules/delegatecall.py
index <HASH>..<HASH> 100644
--- a/mythril/analysis/modules/delegatecall.py
+++ b/mythril/analysis/modules/delegatecall.py
@@ -50,7 +50,7 @@ def _analyze_states(state: GlobalState) -> List[Issue]:
     if call.type is not "DELEGATECALL":
         return []
 
-    if call.node.function_name is not "fallback":
+    if state.environment.active_function_name is not "fallback":
         return []
 
     state = call.state
@@ -77,8 +77,8 @@ def _concrete_call(
         return []
 
     issue = Issue(
-        contract=call.node.contract_name,
-        function_name=call.node.function_name,
+        contract=state.environment.active_account.contract_name,
+        function_name=state.environment.active_function_name,
         address=address,
         swc_id=DELEGATECALL_TO_UNTRUSTED_CONTRACT,
         bytecode=state.environment.code.bytecode,
use environment instead of node in delegate call
py
diff --git a/easywebdav/client.py b/easywebdav/client.py
index <HASH>..<HASH> 100644
--- a/easywebdav/client.py
+++ b/easywebdav/client.py
@@ -137,5 +137,5 @@ class Client(object):
         tree = xml.parse(StringIO(response.content))
         return [elem2file(elem) for elem in tree.findall('{DAV:}response')]
     def exists(self, remote_path):
-        response = self._send('HEAD', remote_path, (200, 201, 404))
+        response = self._send('HEAD', remote_path, (200, 404))
         return True if response.status_code != 404 else False
We should not expect <I> code
py
diff --git a/dvc/remote/base.py b/dvc/remote/base.py
index <HASH>..<HASH> 100644
--- a/dvc/remote/base.py
+++ b/dvc/remote/base.py
@@ -80,8 +80,8 @@ class RemoteBase(object):
     @staticmethod
     def tmp_file(fname):
         """ Temporary name for a partial download """
-        # FIXME probably better use uuid()
-        return fname + '.part'
+        import uuid
+        return fname + '.' + str(uuid.uuid4())
 
     def save_info(self, path_info):
         raise NotImplementedError
remote: use uuid for tmp name generation
py
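A standalone sketch of the new naming scheme (the file name is hypothetical): a uuid4 suffix makes each partial-download name unique, whereas the old fixed '.part' suffix could collide when two downloads of the same file overlap.

import uuid

def tmp_file(fname):
    # Unique suffix per call: concurrent downloads of the same file
    # no longer race on a shared '<fname>.part' path.
    return fname + '.' + str(uuid.uuid4())

print(tmp_file('data.csv'))  # e.g. data.csv.1b9d6bcd-...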
diff --git a/AdvancedHTMLParser/__init__.py b/AdvancedHTMLParser/__init__.py
index <HASH>..<HASH> 100755
--- a/AdvancedHTMLParser/__init__.py
+++ b/AdvancedHTMLParser/__init__.py
@@ -5,7 +5,7 @@
 
 from .Parser import AdvancedHTMLParser, IndexedAdvancedHTMLParser
 from .Tags import AdvancedTag, TagCollection, toggleAttributesDOM, isTextNode, isTagNode
-from .Formatter import AdvancedHTMLFormatter
+from .Formatter import AdvancedHTMLFormatter, AdvancedHTMLMiniFormatter
 from .Validator import ValidatingAdvancedHTMLParser
 from .exceptions import InvalidCloseException, MissedCloseException, HTMLValidationException, MultipleRootNodeException
 from .SpecialAttributes import StyleAttribute
Add AdvancedHTMLMiniFormatter to the primary AdvancedHTMLParser export
py
diff --git a/atomic_reactor/tasks/binary.py b/atomic_reactor/tasks/binary.py
index <HASH>..<HASH> 100644
--- a/atomic_reactor/tasks/binary.py
+++ b/atomic_reactor/tasks/binary.py
@@ -22,9 +22,9 @@ class BinaryPreBuildTask(plugin_based.PluginBasedTask):
 
     plugins_def = plugin_based.PluginsDef(
         prebuild=[
-            {"name": "check_user_settings"},
             {"name": "distgit_fetch_artefacts"},
             {"name": "check_and_set_platforms"},
+            {"name": "check_user_settings"},
             {"name": "flatpak_create_dockerfile"},
             {"name": "inject_parent_image"},
             {"name": "check_base_image"},
Run check_user_settings after setting platforms

CLOUDBLD-<I>

The changes to the check_user_settings plugin only work if the build directory is properly initialized. The initialization happens in the check_and_set_platforms plugin. Change the order of the plugins to make this work.
py
diff --git a/fedmsg/core.py b/fedmsg/core.py
index <HASH>..<HASH> 100644
--- a/fedmsg/core.py
+++ b/fedmsg/core.py
@@ -77,9 +77,7 @@ class FedMsgContext(object):
                 raise IOError("Couldn't find an available endpoint.")
 
         else:
-            # fedmsg is not configured to send any messages
-            #raise ValueError("FedMsgContext was misconfigured.")
-            pass
+            warnings.warn("fedmsg is not configured to send any messages")
 
         atexit.register(self.destroy)
emit a warning if configured without a publisher.
py
diff --git a/beaver/ssh_tunnel.py b/beaver/ssh_tunnel.py
index <HASH>..<HASH> 100644
--- a/beaver/ssh_tunnel.py
+++ b/beaver/ssh_tunnel.py
@@ -45,6 +45,8 @@ class BeaverSshTunnel(BeaverSubprocess):
     """SSH Tunnel Subprocess Wrapper"""
 
     def __init__(self, beaver_config):
+        super(BeaverSshTunnel, self).__init__(beaver_config)
+
         key_file = beaver_config.get('ssh_key_file')
         tunnel = beaver_config.get('ssh_tunnel')
         tunnel_port = beaver_config.get('ssh_tunnel_port')
Fix issue where super method was not called in BeaverSshTunnel
py
diff --git a/tweepy/api.py b/tweepy/api.py
index <HASH>..<HASH> 100644
--- a/tweepy/api.py
+++ b/tweepy/api.py
@@ -352,10 +352,10 @@ class API:
             return self.chunked_upload(filename, file=file, file_type=file_type,
                                        *args, **kwargs)
         else:
-            return self.simple_upload(filename, file=file, *args, **kwargs)
+            return self.simple_upload(filename, file=file, **kwargs)
 
     @payload('media')
-    def simple_upload(self, filename, *args, file=None, media_category=None,
+    def simple_upload(self, filename, *, file=None, media_category=None,
                       additional_owners=None, **kwargs):
         """ :reference: https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-upload
         """
@@ -371,7 +371,7 @@ class API:
             post_data['additional_owners'] = additional_owners
 
         return self.request(
-            'POST', 'media/upload', *args, post_data=post_data, files=files,
+            'POST', 'media/upload', post_data=post_data, files=files,
             upload_api=True, **kwargs
        )
Stop allowing positional arguments for API.simple_upload

Stop allowing positional arguments besides filename for API.simple_upload
py
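A minimal illustration of the bare `*` marker the patch introduces (the function body is a stand-in, not tweepy's): parameters after `*` are keyword-only, so stray positional arguments raise TypeError instead of silently binding to `file`.

def simple_upload(filename, *, file=None, media_category=None):
    # Everything after the bare `*` must be passed by keyword.
    return filename, file, media_category

print(simple_upload('pic.png', media_category='tweet_image'))  # OK
try:
    simple_upload('pic.png', 'oops')  # extra positional argument
except TypeError as exc:
    print(exc)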
diff --git a/glances/glances.py b/glances/glances.py
index <HASH>..<HASH> 100644
--- a/glances/glances.py
+++ b/glances/glances.py
@@ -1391,10 +1391,16 @@ class GlancesStats:
 
         if psutil_net_io_counters:
             # psutil >= 1.0.0
-            get_net_io_counters = psutil.net_io_counters(pernic=True)
+            try:
+                get_net_io_counters = psutil.net_io_counters(pernic=True)
+            except IOError:
+                self.network_error_tag = True
         else:
             # psutil < 1.0.0
-            get_net_io_counters = psutil.network_io_counters(pernic=True)
+            try:
+                get_net_io_counters = psutil.network_io_counters(pernic=True)
+            except IOError:
+                self.network_error_tag = True
 
         if not hasattr(self, 'network_old'):
             try:
Add a check on psutil.net_io_counters / bug on some Debian distributions
py
diff --git a/spanner/pylint.config.py b/spanner/pylint.config.py
index <HASH>..<HASH> 100644
--- a/spanner/pylint.config.py
+++ b/spanner/pylint.config.py
@@ -14,10 +14,18 @@
 
 """This module is used to configure gcp-devrel-py-tools run-pylint."""
 
+import copy
+
+from gcp_devrel.tools import pylint
+
 # Library configuration
 
 # library_additions = {}
-# library_replacements = {}
+
+# Ignore generated code
+library_replacements = copy.deepcopy(pylint.DEFAULT_LIBRARY_RC_REPLACEMENTS)
+library_replacements['MASTER']['ignore'].append('spanner_v1')
+library_replacements['MASTER']['ignore'].append('spanner_admin_instance_v1')
+library_replacements['MASTER']['ignore'].append('spanner_admin_database_v1')
 
 # Test configuration
Exclude generated code from linter. (#<I>)
py
diff --git a/cleverhans/experimental/certification/dual_formulation.py b/cleverhans/experimental/certification/dual_formulation.py
index <HASH>..<HASH> 100644
--- a/cleverhans/experimental/certification/dual_formulation.py
+++ b/cleverhans/experimental/certification/dual_formulation.py
@@ -90,8 +90,8 @@ class DualFormulation(object):
       lo_plus_up = self.nn_params.forward_pass(self.lower[i] + self.upper[i], i)
      lo_minus_up = self.nn_params.forward_pass(self.lower[i] - self.upper[i], i, is_abs=True)
      up_minus_lo = self.nn_params.forward_pass(self.upper[i] - self.lower[i], i, is_abs=True)
-      current_lower = 0.5 * (lo_plus_up + lo_minus_up + self.nn_params.biases[i])
-      current_upper = 0.5 * (lo_plus_up + up_minus_lo + self.nn_params.biases[i])
+      current_lower = 0.5 * (lo_plus_up + lo_minus_up) + self.nn_params.biases[i]
+      current_upper = 0.5 * (lo_plus_up + up_minus_lo) + self.nn_params.biases[i]
       self.pre_lower.append(current_lower)
       self.pre_upper.append(current_upper)
       self.lower.append(tf.nn.relu(current_lower))
Fix bug in certification (#<I>)

* Changing code to use TF lanczos
* addressing comments
* addressing more comments
* remove unnecessary
* fixed error
* Warm starts for lanczos
* making it work
* making tests pass
* fix bug in code
* make test pass
* fix comments
* fix bug
py
diff --git a/tests/categorical_ensembling_test.py b/tests/categorical_ensembling_test.py
index <HASH>..<HASH> 100644
--- a/tests/categorical_ensembling_test.py
+++ b/tests/categorical_ensembling_test.py
@@ -56,14 +56,7 @@ def test_categorical_ensembling_regression(model_name=None):
     print('test_score')
     print(test_score)
 
-    # Bumping this up since without these features our score drops
-    lower_bound = -4.0
-    if model_name == 'DeepLearningRegressor':
-        # NOTE: the model fails to learn for one of the classes. might be worth looking into more
-        lower_bound = -16
-    if model_name == 'LGBMRegressor':
-        lower_bound = -4.95
-
+    lower_bound = -4.2
     assert lower_bound < test_score < -2.8
adjusts test bound, and cleans up old code
py
diff --git a/test/datasets_utils.py b/test/datasets_utils.py
index <HASH>..<HASH> 100644
--- a/test/datasets_utils.py
+++ b/test/datasets_utils.py
@@ -266,6 +266,10 @@ class DatasetTestCase(unittest.TestCase):
     def inject_fake_data(self, tmpdir: str, config: Dict[str, Any]) -> Union[int, Dict[str, Any]]:
         """Inject fake data for dataset into a temporary directory.
 
+        During the creation of the dataset the download and extract logic is disabled. Thus, the fake data injected
+        here needs to resemble the raw data, i.e. the state of the dataset directly after the files are downloaded and
+        potentially extracted.
+
         Args:
             tmpdir (str): Path to a temporary directory. For most cases this acts as root directory for the dataset
                 to be created and in turn also for the fake data injected here.
Add explanation for injected fake data in dataset tests (#<I>)
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -88,6 +88,10 @@ setup(
             "openquake.cfg", "openquake_worker.cfg",
             "README.md", "LICENSE", "CONTRIBUTORS.txt"]},
     namespace_packages=['openquake'],
+    install_requires=[
+        'openquake.hazardlib',
+        'openquake.risklib',
+    ],
     scripts=["openquake/engine/bin/oq_create_db"],
     zip_safe=False,
 )
Added dependencies in setup.py
py
diff --git a/marrow/schema/declarative.py b/marrow/schema/declarative.py
index <HASH>..<HASH> 100644
--- a/marrow/schema/declarative.py
+++ b/marrow/schema/declarative.py
@@ -1,5 +1,6 @@
 # encoding: utf-8
 
+from inspect import isclass
 
 from .meta import Element
 
@@ -104,7 +105,7 @@ class Attribute(DataAttribute):
             except AttributeError:
                 pass
             else:
-                value = default() if callable(default) else default
+                value = default() if callable(default) and not isclass(default) else default
 
             if self.assign:
                 self.__set__(obj, value)
Fix to prevent auto-instantiation of classes used as default values.
py
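A small sketch of the pitfall this fixes (Widget is a hypothetical class): classes are callable, so a bare callable() check would instantiate a class supplied as a default instead of returning the class itself.

from inspect import isclass

class Widget:
    pass

default = Widget
# callable(Widget) is True, so without the isclass() guard the
# computed default would be a Widget instance, not the class.
value = default() if callable(default) and not isclass(default) else default
assert value is Widget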
diff --git a/flake8_import_order/stdlib_list.py b/flake8_import_order/stdlib_list.py
index <HASH>..<HASH> 100644
--- a/flake8_import_order/stdlib_list.py
+++ b/flake8_import_order/stdlib_list.py
@@ -11,6 +11,7 @@ STDLIB_NAMES = set((
     "array",
     "ast",
     "asynchat",
+    "asyncio",
     "asyncore",
     "atexit",
     "audioop",
Add asyncio to stdlib list
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,8 +2,8 @@ import sys
 from setuptools import setup
 
 setup(name='pysmap',
-      packages=['pysmap', 'pysmap.twitterutil', 'pysmap.viz'],
-      version='0.0.41',
+      packages=['pysmap', 'pysmap.twitterutil', 'pysmap.viz', 'pysmap.mltools'],
+      version='0.0.42',
       description='pysmap is a set of tools for working with twitter data',
       author='yvan',
       author_email='[email protected]',
fixed setup to include mltools
py
diff --git a/openpnm/topotools/generators/__init__.py b/openpnm/topotools/generators/__init__.py
index <HASH>..<HASH> 100644
--- a/openpnm/topotools/generators/__init__.py
+++ b/openpnm/topotools/generators/__init__.py
@@ -1,3 +1,29 @@
+r"""
+================================================
+Generators (:mod:`openpnm.topotools.generators`)
+================================================
+
+This module contains a selection of functions that deal specifically with
+generating sufficient information that can be turned into an openpnm network.
+
+.. currentmodule:: openpnm.topotools.generators
+
+.. autosummary::
+   :template: mybase.rst
+   :toctree: generated/
+   :nosignatures:
+
+   cubic
+   delaunay
+   gabriel
+   voronoi
+   voronoi_delaunay_dual
+   cubic_template
+   fcc
+   bcc
+
+"""
+
 from .cubic import cubic
 from .delaunay import delaunay
 from .gabriel import gabriel
Add docstrings to generators' init file
py
diff --git a/llvmlite/tests/test_ir.py b/llvmlite/tests/test_ir.py
index <HASH>..<HASH> 100644
--- a/llvmlite/tests/test_ir.py
+++ b/llvmlite/tests/test_ir.py
@@ -325,7 +325,7 @@ class TestBlock(TestBase):
         block.replace(d, f)
         self.check_block(block, """\
             my_block:
-                %"c" = add i32 %".1", %".2"
+                %"cz" = add i32 %".1", %".2"
                 %"f" = sdiv i32 %"c", %".2"
                 %"e" = mul i32 %"f", %".2"
             """)
Introduce dummy error (check it gets detected)
py
diff --git a/rhino/mapper.py b/rhino/mapper.py
index <HASH>..<HASH> 100644
--- a/rhino/mapper.py
+++ b/rhino/mapper.py
@@ -370,6 +370,8 @@ class Mapper(object):
         ranges. Ranges passed as a dictionary will be merged into the default
         ranges, with those given in `ranges` taking precedence.
     """
+    default_encoding = None
+    default_content_type = None
 
     # TODO 'root' parameter for manually specifying a URL prefix not reflected
     # in SCRIPT_NAME (e.g. when proxying).
@@ -469,9 +471,13 @@ class Mapper(object):
         # TODO here is were we would have to prepend self.root
         request._add_context(root=request.script_name, mapper=self, route=None)
         for route in self.routes:
-            rv = route(request, ctx)
-            if rv is not None:
-                return rv
+            response = route(request, ctx)
+            if response is not None:
+                if self.default_encoding is not None:
+                    response.default_encoding = self.default_encoding
+                if self.default_content_type is not None:
+                    response.default_content_type = self.default_content_type
+                return response
         raise NotFound
 
     def start_server(self, host='localhost', port=9000, app=None):
Allow overriding default encoding/content-type

Added a way to override Response.default_encoding and Response.default_content_type by setting Mapper attributes of the same name. This does not affect responses raised as exceptions.
py
diff --git a/isovar/variant_read.py b/isovar/variant_read.py
index <HASH>..<HASH> 100644
--- a/isovar/variant_read.py
+++ b/isovar/variant_read.py
@@ -43,13 +43,13 @@ def trim_N_nucleotides(prefix, suffix):
     if 'N' in prefix:
         # trim prefix to exclude all occurrences of N
         rightmost_index = prefix.rfind('N')
-        logging.info("Trimming %d nucleotides from read prefix '%s'" % (
+        logging.debug("Trimming %d nucleotides from read prefix '%s'" % (
             rightmost_index + 1, prefix))
         prefix = prefix[rightmost_index + 1:]
 
     if 'N' in suffix:
         leftmost_index = suffix.find('N')
-        logging.info("Trimming %d nucleotides from read suffix '%s'" % (
+        logging.debug("Trimming %d nucleotides from read suffix '%s'" % (
             len(suffix) - leftmost_index, suffix))
         suffix = suffix[:leftmost_index]
changed logging.info to logging.debug
py
diff --git a/test/integration/integration.py b/test/integration/integration.py
index <HASH>..<HASH> 100644
--- a/test/integration/integration.py
+++ b/test/integration/integration.py
@@ -1,4 +1,3 @@
-import errno
 import os
 import shutil
 import subprocess
@@ -35,13 +34,10 @@ def cleandir(path, recreate=True):
         try:
             shutil.rmtree(path)
             break
-        except Exception as e:
-            if e.errno == errno.ENOTEMPTY:
-                if t is None:
-                    raise RuntimeError('unable to remove {}'.format(path))
-                time.sleep(t)
-            elif e.errno != errno.ENOENT:
-                raise
+        except OSError:
+            if t is None:
+                raise RuntimeError('unable to remove {}'.format(path))
+            time.sleep(t)
 
     if recreate:
         makedirs(path)
Be more lenient when retrying to remove a directory in the integration tests
py
diff --git a/src/ansiblelint/_prerun.py b/src/ansiblelint/_prerun.py
index <HASH>..<HASH> 100644
--- a/src/ansiblelint/_prerun.py
+++ b/src/ansiblelint/_prerun.py
@@ -38,7 +38,7 @@ def check_ansible_presence() -> None:
 
         if version.parse(release.__version__) <= version.parse(ANSIBLE_MIN_VERSION):
             failed = True
-    except (ImportError, ModuleNotFoundError) as e:
+    except (ImportError, ModuleNotFoundError, UnboundLocalError) as e:
         failed = True
         __version__ = "none"
         print(e, file=sys.stderr)
Improve ansible version check (#<I>)

Fixes: #<I>
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ from setuptools import setup, find_packages
 
 setup(
     name='frasco',
-    version='1.5.0',
+    version='1.5.1',
     url='http://github.com/digicoop/frasco',
     license='MIT',
     author='Maxime Bouroumeau-Fuseau',
@@ -13,6 +13,8 @@ setup(
     package_data={
         'frasco': [
             'angular/static/*.js',
+            'assets/*.js',
+            'assets/*.html',
             'billing/invoicing/emails/*.html',
             'mail/templates/*.html',
             'mail/templates/layouts/*',
Fix: added missing assets in python package
py
diff --git a/push_notifications/apns.py b/push_notifications/apns.py
index <HASH>..<HASH> 100644
--- a/push_notifications/apns.py
+++ b/push_notifications/apns.py
@@ -15,6 +15,13 @@ from django.core.exceptions import ImproperlyConfigured
 from . import NotificationError, PUSH_NOTIFICATIONS_SETTINGS as SETTINGS
 
 
+SETTINGS.setdefault("APNS_PORT", 2195)
+if settings.DEBUG:
+    SETTINGS.setdefault("APNS_HOST", "gateway.sandbox.push.apple.com")
+else:
+    SETTINGS.setdefault("APNS_HOST", "gateway.push.apple.com")
+
+
 class APNSError(NotificationError):
     pass
@@ -22,12 +29,6 @@ class APNSError(NotificationError):
 class APNSDataOverflow(APNSError):
     pass
 
-SETTINGS.setdefault("APNS_PORT", 2195)
-if settings.DEBUG:
-    SETTINGS.setdefault("APNS_HOST", "gateway.sandbox.push.apple.com")
-else:
-    SETTINGS.setdefault("APNS_HOST", "gateway.push.apple.com")
-
 
 APNS_MAX_NOTIFICATION_SIZE = 256
apns: Declare settings at the top
py
diff --git a/click_shell/core.py b/click_shell/core.py
index <HASH>..<HASH> 100644
--- a/click_shell/core.py
+++ b/click_shell/core.py
@@ -127,9 +127,9 @@ class ClickShell(ClickCmd):
 
     def add_command(self, cmd, name):
         # Use the MethodType to add these as bound methods to our current instance
-        setattr(self, 'do_%s' % name, types.MethodType(get_invoke(cmd), self, ClickShell))
-        setattr(self, 'help_%s' % name, types.MethodType(get_help(cmd), self, ClickShell))
-        setattr(self, 'complete_%s' % name, types.MethodType(get_complete(cmd), self, ClickShell))
+        setattr(self, 'do_%s' % name, types.MethodType(get_invoke(cmd), self))
+        setattr(self, 'help_%s' % name, types.MethodType(get_help(cmd), self))
+        setattr(self, 'complete_%s' % name, types.MethodType(get_complete(cmd), self))
 
 
 def make_click_shell(ctx, prompt=None, intro=None, hist_file=None):
Fixed things broken on Python 3
py
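A runnable sketch of the Python 3 signature used in the fix: types.MethodType(function, instance) binds a function to an instance; the third class argument accepted by Python 2 no longer exists.

import types

class Shell:
    pass

def do_greet(self):
    return 'hello from %s' % type(self).__name__

shell = Shell()
# Python 3: two arguments only (function, instance).
shell.do_greet = types.MethodType(do_greet, shell)
print(shell.do_greet())  # hello from Shell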
diff --git a/samples/dumpit.py b/samples/dumpit.py
index <HASH>..<HASH> 100755
--- a/samples/dumpit.py
+++ b/samples/dumpit.py
@@ -120,7 +120,7 @@ for s in slots:
     objects = session.findObjects()
 
     print
-    print "Found %d objects: %s" % (len(objects), map(lambda x:x.value(), objects))
+    print "Found %d objects: %s" % (len(objects), map(lambda x: "0x%08X" % x.value(), objects))
 
     all_attributes = PyKCS11.CKA.keys()
     # remove the CKR_ATTRIBUTE_SENSITIVE attributes since we can't get
Display object IDs as hex in the list of objects found, to reflect the way they are displayed in the object header
py
diff --git a/tile_generator/opsmgr.py b/tile_generator/opsmgr.py
index <HASH>..<HASH> 100644
--- a/tile_generator/opsmgr.py
+++ b/tile_generator/opsmgr.py
@@ -569,9 +569,9 @@ def get_cfinfo():
         'system_domain': system_domain,
         'apps_domain': apps_domain,
         'admin_username': admin_credentials['identity'],
-        'admin_password': admin_credentials['password'],
+        'admin_password': admin_credentials.get('password', None),
         'system_services_username': system_services_credentials['identity'],
-        'system_services_password': system_services_credentials['password'],
+        'system_services_password': system_services_credentials.get('password', None),
     }
 
 def logs(install_id):
Added some robustness for the edge case where cf fails to install
py
diff --git a/milkman/dairy.py b/milkman/dairy.py
index <HASH>..<HASH> 100644
--- a/milkman/dairy.py
+++ b/milkman/dairy.py
@@ -111,7 +111,10 @@ class MilkTruck(object):
         for field in self.fields_to_generate(self.model_class._meta.fields,
                                              exclude):
             if isinstance(field, RelatedField):
-                v = the_milkman.deliver(field.rel.to)
+                try:
+                    v = the_milkman.deliver(field.rel.to)
+                except:
+                    pass
             else:
                 v = self.generator_for(the_milkman.registry, field).next()
             try:
When you try to generate a foreign key which itself contains a foreign key, it fails. This hack is fine, as we'll raise an error if the foreign key id hasn't been given as a kwarg to deliver.
py
diff --git a/pandas/tests/groupby/test_apply.py b/pandas/tests/groupby/test_apply.py
index <HASH>..<HASH> 100644
--- a/pandas/tests/groupby/test_apply.py
+++ b/pandas/tests/groupby/test_apply.py
@@ -880,3 +880,24 @@ def test_apply_function_index_return(function):
         index=pd.Index([1, 2, 3], name="id"),
     )
     tm.assert_series_equal(result, expected)
+
+
+def test_apply_function_with_indexing():
+    # GH: 33058
+    df = pd.DataFrame(
+        {"col1": ["A", "A", "A", "B", "B", "B"], "col2": [1, 2, 3, 4, 5, 6]}
+    )
+
+    def fn(x):
+        x.col2[x.index[-1]] = 0
+        return x.col2
+
+    result = df.groupby(["col1"], as_index=False).apply(fn)
+    expected = pd.Series(
+        [1, 2, 0, 4, 5, 0],
+        index=pd.MultiIndex.from_tuples(
+            [(0, 0), (0, 1), (0, 2), (1, 3), (1, 4), (1, 5)]
+        ),
+        name="col2",
+    )
+    tm.assert_series_equal(result, expected)
BUG: Add test to ensure that bug will not occur again. #<I> (#<I>)
py
diff --git a/asammdf/blocks/mdf_v4.py b/asammdf/blocks/mdf_v4.py
index <HASH>..<HASH> 100644
--- a/asammdf/blocks/mdf_v4.py
+++ b/asammdf/blocks/mdf_v4.py
@@ -4517,12 +4517,20 @@ class MDF4(object):
             cg_source = grp.channel_group.acq_source
             if source:
                 source = SignalSource(
-                    source.name or (cg_source and cg_source.name) or "",
+                    source.name,
                     source.path,
                     source.comment,
                     source.source_type,
                     source.bus_type,
                 )
+            elif cg_source:
+                source = SignalSource(
+                    cg_source.name,
+                    cg_source.path,
+                    cg_source.comment,
+                    cg_source.source_type,
+                    cg_source.bus_type,
+                )
             else:
                 source = None
use channel group source if the channel has no referenced source
py
diff --git a/openquake/calculators/tests/event_based_risk_test.py b/openquake/calculators/tests/event_based_risk_test.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/event_based_risk_test.py
+++ b/openquake/calculators/tests/event_based_risk_test.py
@@ -171,7 +171,8 @@ class EventBasedRiskTestCase(CalculatorTestCase):
             out['agg_curve-rlzs', 'xml']
         self.assertEqual(len(fnames), 3)  # 2 loss_maps + 1 agg_curve
         for fname in fnames:
-            self.assertEqualFiles('expected/' + strip_calc_id(fname), fname)
+            self.assertEqualFiles('expected/' + strip_calc_id(fname),
+                                  fname, delta=1E-5)
 
         fnames = export(('loss_maps-rlzs', 'csv'), self.calc.datastore)
         if REFERENCE_OS:
Raised the tolerance in test_occupants

Former-commit-id: b<I>a<I>b<I>e6c9f1e<I>f5e<I>e5c<I>bb<I>
py
diff --git a/thermo/eos_mix_methods.py b/thermo/eos_mix_methods.py
index <HASH>..<HASH> 100644
--- a/thermo/eos_mix_methods.py
+++ b/thermo/eos_mix_methods.py
@@ -1263,12 +1263,11 @@ def SRK_lnphis(T, P, Z, b, a_alpha, bs, a_alpha_j_rows, N, lnphis=None):
     t3 = log(1. + B/Z)
     Z_minus_one_over_B = (Z - 1.0)*B_inv
     two_over_a_alpha = 2./a_alpha
-    x0 = A_B*B_inv
-    x1 = A_B*two_over_a_alpha
+    x0 = A_B*B_inv*t3
+    x1 = A_B*two_over_a_alpha*t3
+    x2 = (Z_minus_one_over_B + x0)*P_RT
     for i in range(N):
-        Bi = bs[i]*P_RT
-        t2 = Bi*x0 - x1*a_alpha_j_rows[i]
-        lnphis[i] = Bi*Z_minus_one_over_B - t0 + t2*t3
+        lnphis[i] = bs[i]*x2 - t0 - x1*a_alpha_j_rows[i]
     return lnphis
 
 def VDW_lnphis(T, P, Z, b, a_alpha, bs, a_alpha_roots, N, lnphis=None):
Finish optimizing SRK fugacities
py
diff --git a/bika/lims/exportimport/instruments/__init__.py b/bika/lims/exportimport/instruments/__init__.py
index <HASH>..<HASH> 100644
--- a/bika/lims/exportimport/instruments/__init__.py
+++ b/bika/lims/exportimport/instruments/__init__.py
@@ -34,6 +34,7 @@ from nuclisens import easyq
 from genexpert import genexpert
 
 __all__ = ['abaxis.vetscan.vs2',
+           'abbott.m2000rt.m2000rt',
           'agilent.masshunter.quantitative',
           'alere.pima.beads',
           'alere.pima.cd4',
@@ -67,6 +68,7 @@ __all__ = ['abaxis.vetscan.vs2',
 # interface
 PARSERS = [
     ['abaxis.vetscan.vs2', 'AbaxisVetScanCSVVS2Parser'],
+    ['abbott.m2000rt.m2000rt', 'AbbottM2000rtTSVParser'],
     ['agilent.masshunter.quantitative', 'MasshunterQuantCSVParser'],
     ['alere.pima.beads', 'AlerePimaSLKParser'],
     ['alere.pima.cd4', 'AlerePimacd4SLKParser'],
Add instrument interface and interface parser relation
py
diff --git a/cassandra/connection.py b/cassandra/connection.py
index <HASH>..<HASH> 100644
--- a/cassandra/connection.py
+++ b/cassandra/connection.py
@@ -352,8 +352,14 @@ class Connection(object):
             return
 
         self.is_defunct = True
-        log.debug("Defuncting connection (%s) to %s:",
-                  id(self), self.host, exc_info=exc)
+        exc_info = sys.exc_info()
+        # if we are not handling an exception, just use the passed exception, and don't try to format exc_info with the message
+        if any(exc_info):
+            log.debug("Defuncting connection (%s) to %s:",
+                      id(self), self.host, exc_info=exc_info)
+        else:
+            log.debug("Defuncting connection (%s) to %s: %s",
+                      id(self), self.host, exc)
         self.last_error = exc
         self.close()
Proper exc_info handling for Connection.defunct debug log

Fixes an issue where exc_info can cause formatting failures when defunct is called while not actually handling an exception.

PYTHON-<I>
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,8 @@ requirements = [
 
 test_requirements = [
     # TODO: put package test requirements here
+    'hypothesis',
+    'pytest'
 ]
 
 setup(
added hypothesis and pytest to test requirements.
py
diff --git a/scripts/devops_tasks/test_run_samples.py b/scripts/devops_tasks/test_run_samples.py
index <HASH>..<HASH> 100644
--- a/scripts/devops_tasks/test_run_samples.py
+++ b/scripts/devops_tasks/test_run_samples.py
@@ -100,10 +100,6 @@ IGNORED_SAMPLES = {
         "mgmt_topic_async.py",
         "proxy_async.py",
         "receive_deferred_message_queue_async.py"
-    ],
-    "azure-ai-formrecognizer": [
-        "sample_recognize_receipts_from_url.py",
-        "sample_recognize_receipts_from_url_async.py"
     ]
 }
unskip url samples (#<I>)
py
diff --git a/parsl/app/futures.py b/parsl/app/futures.py
index <HASH>..<HASH> 100644
--- a/parsl/app/futures.py
+++ b/parsl/app/futures.py
@@ -63,7 +63,6 @@ class DataFuture(Future):
         else:
             raise ValueError("DataFuture must be initialized with a str or File")
         self.parent = fut
-        self._exception = None
 
         if fut is None:
             logger.debug("Setting result to filepath since no future was passed")
Remove unused _exception member (#<I>)
py
diff --git a/pyvisa_py/tcpip.py b/pyvisa_py/tcpip.py
index <HASH>..<HASH> 100644
--- a/pyvisa_py/tcpip.py
+++ b/pyvisa_py/tcpip.py
@@ -286,12 +286,12 @@ class TCPIPInstrSession(Session):
         # keepalive packets even for VXI11 protocol. To read more on this issue
        # https://tech.xing.com/a-reason-for-unexplained-connection-timeouts-on-kubernetes-docker-abd041cf7e02
         if attribute == constants.VI_ATTR_TCPIP_KEEPALIVE:
-            if attribute_state === True:
+            if attribute_state == True:
                 self.interface.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
                 self.interface.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 60)
                 self.interface.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 60)
                 self.interface.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 5)
-            elif attribute_state === False:
+            elif attribute_state == False:
                 self.interface.sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0)
             else:
                 return StatusCode.error_nonsupported_format
Python isn't Julia
py
diff --git a/nbserverproxy/handlers.py b/nbserverproxy/handlers.py
index <HASH>..<HASH> 100644
--- a/nbserverproxy/handlers.py
+++ b/nbserverproxy/handlers.py
@@ -235,6 +235,12 @@ class LocalProxyHandler(WebSocketHandlerMixin, IPythonHandler):
 
         headers = self.proxy_request_headers()
 
+        # Some applications check X-Forwarded-Context and X-ProxyContextPath
+        # headers to see if and where they are being proxied from. We set
+        # them to be {base_url}/proxy/{port}.
+        headers['X-Forwarded-Context'] = headers['X-ProxyContextPath'] = \
+            url_path_join(self.base_url, 'proxy', port)
+
         req = httpclient.HTTPRequest(
             client_uri, method=self.request.method, body=body,
             headers=headers,
Support X-Forwarded-Context and X-ProxyContextPath.
py
diff --git a/holoviews/ipython/display_hooks.py b/holoviews/ipython/display_hooks.py
index <HASH>..<HASH> 100644
--- a/holoviews/ipython/display_hooks.py
+++ b/holoviews/ipython/display_hooks.py
@@ -204,9 +204,9 @@ def view_display(view, size, **kwargs):
 
 @display_hook
 def map_display(vmap, size, map_format, max_frames, widget_mode, **kwargs):
     if not isinstance(vmap, HoloMap): return None
+    magic_info = process_cell_magics(vmap)
     if widget_mode is not None and len(vmap.keys()) > 1:
         return display_widgets(vmap, map_format, widget_mode)
-    magic_info = process_cell_magics(vmap)
     if magic_info: return magic_info
     mapplot = Store.defaults[vmap.type](vmap, **opts(vmap.last, get_plot_size(size)))
Fixed magic not being processed before widget display
py
diff --git a/apiritif/__init__.py b/apiritif/__init__.py
index <HASH>..<HASH> 100644
--- a/apiritif/__init__.py
+++ b/apiritif/__init__.py
@@ -101,9 +101,12 @@ class transaction(object):
         self.name = name
         self.success = True
         self.error_message = None
-        self._extras = {}
+        self._request = None
+        self._response = None
+        self._response_code = None
         self._start_ts = None
         self._finish_ts = None
+        self._extras = {}
 
     def __enter__(self):
         self.start()
@@ -140,6 +143,24 @@ class transaction(object):
         self.success = False
         self.error_message = message
 
+    def request(self):
+        return self._request
+
+    def set_request(self, value):
+        self._request = value
+
+    def response(self):
+        return self._response
+
+    def set_response(self, value):
+        self._response = value
+
+    def response_code(self):
+        return self._response_code
+
+    def set_response_code(self, code):
+        self._response_code = code
+
     def attach_extra(self, key, value):
         self._extras[key] = value
Add transaction fields for request/response/response-code
py
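A hypothetical usage sketch of the new accessors (the transaction name and recorded values are invented; only the constructor, context-manager use, and methods visible in the diff are assumed):

from apiritif import transaction

tx = transaction('login')
with tx:
    # values a test harness might record for later reporting
    tx.set_request('GET /login HTTP/1.1')
    tx.set_response_code(200)
print(tx.name, tx.response_code())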
diff --git a/ibis/backends/base/sql/alchemy/registry.py b/ibis/backends/base/sql/alchemy/registry.py
index <HASH>..<HASH> 100644
--- a/ibis/backends/base/sql/alchemy/registry.py
+++ b/ibis/backends/base/sql/alchemy/registry.py
@@ -472,6 +472,7 @@ sqlalchemy_operation_registry: Dict[Any, Any] = {
     ops.BitOr: reduction(sa.func.bit_or),
     ops.BitXor: reduction(sa.func.bit_xor),
     ops.CountDistinct: reduction(lambda arg: sa.func.count(arg.distinct())),
+    ops.HLLCardinality: reduction(lambda arg: sa.func.count(arg.distinct())),
     ops.GroupConcat: _group_concat,
     ops.Between: fixed_arity(sa.between, 3),
     ops.IsNull: _is_null,
feat(sqlalchemy): implement approx_count_distinct as count distinct
py
diff --git a/cockroachdb/sqlalchemy/test_requirements.py b/cockroachdb/sqlalchemy/test_requirements.py
index <HASH>..<HASH> 100644
--- a/cockroachdb/sqlalchemy/test_requirements.py
+++ b/cockroachdb/sqlalchemy/test_requirements.py
@@ -25,13 +25,12 @@ class Requirements(SuiteRequirements):
     # We don't do implicit casts.
     date_coerces_from_datetime = exclusions.closed()
 
-    # Our reflection support is incomplete (we need to return type
-    # parameters).
-    table_reflection = exclusions.open()
-    # The following tests are also disabled by disabling_table_reflection,
-    # but are failing for their own reasons.
+    # We do not support creation of views with `SELECT *` expressions,
+    # which these tests use.
     view_reflection = exclusions.closed()
     view_column_reflection = exclusions.closed()
+    # Requires either implementing pg_get_constraintdef() or overriding
+    # Dialect.get_foreign_keys()
     foreign_key_constraint_reflection = exclusions.closed()
 
     # The autoincrement tests assume a predictable 1-based sequence.
Update comments on sqlalchemy test requirements
py
diff --git a/pytest_cov.py b/pytest_cov.py
index <HASH>..<HASH> 100644
--- a/pytest_cov.py
+++ b/pytest_cov.py
@@ -276,6 +276,8 @@ class CovPlugin(object):
             setattr(config.option, option, result)
 
     def pytest_funcarg__cov(self, request):
+        """A pytest funcarg that provide access to the underlying coverage object."""
+
         return self.cov_controller.cov
 
     def pytest_sessionstart(self, session):
Added docstring for pytest funcarg.
py
diff --git a/penaltymodel_maxgap/penaltymodel/maxgap/package_info.py b/penaltymodel_maxgap/penaltymodel/maxgap/package_info.py
index <HASH>..<HASH> 100644
--- a/penaltymodel_maxgap/penaltymodel/maxgap/package_info.py
+++ b/penaltymodel_maxgap/penaltymodel/maxgap/package_info.py
@@ -13,7 +13,7 @@
 # limitations under the License.
 #
 # ================================================================================================
-__version__ = '0.5.3'
+__version__ = '0.5.4'
 __author__ = 'D-Wave Systems Inc.'
 __authoremail__ = '[email protected]'
 __description__ = 'Generates penalty models using smt solvers.'
Release maxgap version <I>. New features: Python <I> support.
py
diff --git a/commander/meta/reflash.py b/commander/meta/reflash.py index <HASH>..<HASH> 100644 --- a/commander/meta/reflash.py +++ b/commander/meta/reflash.py @@ -24,17 +24,20 @@ def reflash_module(controller, hexfile, name=None, address=None, force=False, ve mod.rpc(1, 0, 8, bucket) mod.reset() - if verbose: - prog = ProgressBar("Reflashing", 10) - prog.start() + sleep(1.5) + if not controller.alarm_asserted(): + print "Module reflash NOT DETECTED. Verify the module checksum to ensure it is programmed correctly." + raise RuntimeError("Could not reflash module, reflash not detected using alarm pin.") + + print "Reflash in progress" + while controller.alarm_asserted(): + sys.stdout.write('.') + sys.stdout.flush() + sleep(0.1) - for i in xrange(0, 10): - sleep(1) - prog.progress(i) + print "\nReflash complete." - prog.end() - else: - sleep(10) + sleep(0.5) if not noreset: if verbose:
Modify reflash to wait for the reflash to finish before sending reset. Requires that the new pic<I>_executive be loaded to work correctly. If the old executive is loaded, it will throw an error saying the reflash was not detected and quit. The reflash still completes regardless, and after ~<I> seconds (<<I> seconds for the pic<I>lf<I>) it will succeed. This allows upgrading from an old pic<I>_executive using the new modtool.
py
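The new wait loop polls the alarm pin until it deasserts. A distilled version of the same pattern, with a timeout guard added as an illustration — the original loop above has none:

import sys
from time import sleep, time

def wait_for_alarm_clear(controller, timeout=120.0):
    # illustrative timeout; the commit's loop waits indefinitely
    deadline = time() + timeout
    while controller.alarm_asserted():
        if time() > deadline:
            raise RuntimeError("Reflash did not finish within %.0f seconds" % timeout)
        sys.stdout.write('.')
        sys.stdout.flush()
        sleep(0.1)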
diff --git a/indra/sources/reach/processor.py b/indra/sources/reach/processor.py index <HASH>..<HASH> 100644 --- a/indra/sources/reach/processor.py +++ b/indra/sources/reach/processor.py @@ -423,7 +423,8 @@ class ReachProcessor(object): elif ns == 'simple_chemical': if xr['id'].startswith('HMDB'): db_refs['HMDB'] = xr['id'] - elif ns == 'fplx': + # We handle "be" here for compatibility with older versions + elif ns in ('fplx', 'be'): db_refs['FPLX'] = xr['id'] # These name spaces are ignored elif ns in ['uaz']:
Handle the 'be' namespace for backwards compatibility
py
diff --git a/openquake/risklib/scientific.py b/openquake/risklib/scientific.py index <HASH>..<HASH> 100644 --- a/openquake/risklib/scientific.py +++ b/openquake/risklib/scientific.py @@ -906,8 +906,8 @@ def classical_damage( imls = numpy.array(fragility_functions._interp_imls) min_val, max_val = hazard_imls[0], hazard_imls[-1] assert min_val > 0, hazard_imls # sanity check - numpy.putmask(imls, imls < min_val, min_val) - numpy.putmask(imls, imls > max_val, max_val) + imls[imls < min_val] = min_val + imls[imls > max_val] = max_val poes = interpolate.interp1d(hazard_imls, hazard_poes)(imls) else: imls = hazard_imls
Small cleanup in risklib.scientific
py
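The replaced putmask calls and the boolean-indexed assignments are equivalent for in-place clipping; numpy.clip would also work. A quick demonstration:

import numpy

imls = numpy.array([0.1, 0.5, 2.0])
min_val, max_val = 0.2, 1.0

numpy.putmask(imls, imls < min_val, min_val)  # old form
imls[imls > max_val] = max_val                # new form
# numpy.clip(imls, min_val, max_val, out=imls) collapses both into one call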
diff --git a/delphi/api.py b/delphi/api.py index <HASH>..<HASH> 100644 --- a/delphi/api.py +++ b/delphi/api.py @@ -34,7 +34,8 @@ def create_qualitative_analysis_graph(sts: List[Influence]) -> AnalysisGraph: return make_cag_skeleton(sts) -def get_subgraph_for_concept(concept: str, cag: AnalysisGraph, depth_limit = 2) -> AnalysisGraph: +def get_subgraph_for_concept(concept: str, cag: AnalysisGraph, + depth_limit = None) -> AnalysisGraph: """ Get a subgraph of the analysis graph for a single concept. Args: @@ -42,8 +43,9 @@ def get_subgraph_for_concept(concept: str, cag: AnalysisGraph, depth_limit = 2) cag depth_limit """ - pred = nx.dfs_predecessors(cag, concept, depth_limit = depth_limit) - return cag.subgraph(list(pred.keys())+[concept]) + rev = cag.reverse() + dfs_edges = nx.dfs_edges(rev, concept, depth_limit = depth_limit) + return cag.subgraph(chain.from_iterable(dfs_edges)) def get_subgraph_for_concept_pair(source: str, target: str,
Fixed get_subgraph_for_concept bug
py
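The fix collects a concept's ancestors by running a DFS over the reversed graph and flattening the visited edges into a node set. A standalone illustration on a toy graph (note that a concept with no ancestors yields no edges, hence an empty subgraph):

import networkx as nx
from itertools import chain

g = nx.DiGraph([("a", "b"), ("b", "c"), ("x", "c")])
rev = g.reverse()
edges = nx.dfs_edges(rev, "c")           # walks the ancestors of "c"
nodes = set(chain.from_iterable(edges))  # {"a", "b", "c", "x"}
sub = g.subgraph(nodes)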
diff --git a/test/test_config.py b/test/test_config.py index <HASH>..<HASH> 100644 --- a/test/test_config.py +++ b/test/test_config.py @@ -102,7 +102,7 @@ class Creation(TestCase): def test__config_is_written_to_the_right_place(self, mock_json, mock_path): dummy_path = '/config/path' mock_path.return_value = dummy_path - with mock.patch(open_mock_string, autospec=True) as mock_open: + with mock.patch(open_mock_string) as mock_open: config.create_default_config() mock_open.assert_called_with(dummy_path, 'w+')
Removed autospec to work around a bug with Python <I>.
py
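For reference, patching the open builtin without autospec looks like the sketch below; open_mock_string is assumed to resolve to something like 'builtins.open' (Python 3) or '__builtin__.open' (Python 2):

from unittest import mock

# 'builtins.open' is assumed here; the test derives it via open_mock_string
with mock.patch('builtins.open') as mock_open:
    open('/config/path', 'w+')
    mock_open.assert_called_with('/config/path', 'w+')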
diff --git a/hotdoc/extensions/c/c_extension.py b/hotdoc/extensions/c/c_extension.py index <HASH>..<HASH> 100644 --- a/hotdoc/extensions/c/c_extension.py +++ b/hotdoc/extensions/c/c_extension.py @@ -16,7 +16,7 @@ # You should have received a copy of the GNU Lesser General Public License # along with this library. If not, see <http://www.gnu.org/licenses/>. -import os, sys, linecache, pkgconfig, glob, subprocess +import os, sys, linecache, pkgconfig, glob, subprocess, shutil from hotdoc.extensions.c.clang import cindex from ctypes import * @@ -35,6 +35,11 @@ from hotdoc.extensions.c.utils import CCommentExtractor from hotdoc.utils.loggable import (info as core_info, warn, Logger, debug as core_debug) + +if shutil.which('llvm-config') is None: + raise ImportError() + + def ast_node_is_function_pointer (ast_node): if ast_node.kind == cindex.TypeKind.POINTER and \ ast_node.get_pointee().get_result().kind != \
c_extension: raise ImportError if llvm-config does not exist
py
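Raising ImportError at import time lets the extension loader treat the module as unavailable instead of failing later inside the clang bindings. The guard in generic form (the error message is an illustrative addition; the commit raises a bare ImportError):

import shutil

if shutil.which('llvm-config') is None:
    # message text is illustrative; the commit raises ImportError()
    raise ImportError("llvm-config not found on PATH")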
diff --git a/airflow/configuration.py b/airflow/configuration.py index <HASH>..<HASH> 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -84,6 +84,8 @@ defaults = { }, 'smtp': { 'smtp_starttls': True, + 'smtp_user': '', + 'smtp_password': '', }, 'kerberos': { 'ccache': '/tmp/airflow_krb5_ccache',
set default smtp_user, smtp_password so they are not needed in user config
py
diff --git a/xtraceback/tests/coverage_transform.py b/xtraceback/tests/coverage_transform.py index <HASH>..<HASH> 100755 --- a/xtraceback/tests/coverage_transform.py +++ b/xtraceback/tests/coverage_transform.py @@ -9,6 +9,10 @@ import sys from coverage import CoverageData +# import this so we get our monkey patching done which is needed for +# os.path.relpath on python2.5 +import xtraceback + def transform_data(data, transform): result = dict()
fixing coverage transform for python<I>
py
diff --git a/salt/modules/zcbuildout.py b/salt/modules/zcbuildout.py index <HASH>..<HASH> 100644 --- a/salt/modules/zcbuildout.py +++ b/salt/modules/zcbuildout.py @@ -33,13 +33,14 @@ def __virtual__(): return __virtualname__ return False - +# Import python libs import os import re import sys import traceback import urllib2 +# Import salt libs from salt.exceptions import CommandExecutionError from salt._compat import string_types @@ -102,7 +103,7 @@ def _salt_callback(func): return _call_callback -class Logger(): +class _Logger(): levels = ('info', 'warn', 'debug', 'error') def __init__(self): @@ -147,7 +148,7 @@ class Logger(): return self._by_level -LOG = Logger() +LOG = _Logger() def _set_status(m,
don't expose the Logger class to the loader
py
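Salt's loader publishes public module-level names from execution modules; a leading underscore keeps a helper private. A sketch of the convention at work (illustrative module, not actual salt code):

def run():                  # public: exposed by the loader
    return _helper()

def _helper():              # private: hidden from the loader
    return True

class _Logger(object):      # classes get the same underscore treatment
    pass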
diff --git a/safe/gui/tools/test/test_wizard_dialog.py b/safe/gui/tools/test/test_wizard_dialog.py index <HASH>..<HASH> 100644 --- a/safe/gui/tools/test/test_wizard_dialog.py +++ b/safe/gui/tools/test/test_wizard_dialog.py @@ -965,6 +965,7 @@ class WizardDialogTest(unittest.TestCase): dialog.pbnCancel.click() + @unittest.skip('Please fix wizard first after layer_mode_none removal') def test_integrated_raster(self): """Test for raster layer and all possibilities.""" layer = clone_raster_layer(
Skipped failing test in wizard. Will be un-skipped after the wizard is finished.
py
diff --git a/EventRegistry/EventRegistry.py b/EventRegistry/EventRegistry.py index <HASH>..<HASH> 100644 --- a/EventRegistry/EventRegistry.py +++ b/EventRegistry/EventRegistry.py @@ -26,7 +26,8 @@ class EventRegistry(object): def __init__(self, host = None, logging = False, minDelayBetweenRequests = 0.5, # the minimum number of seconds between individual api calls repeatFailedRequestCount = -1, # if a request fails (for example, because ER is down), what is the max number of times the request should be repeated (-1 for indefinitely) - verboseOutput = False): # if true, additional info about query times etc will be printed to console + verboseOutput = False, # if true, additional info about query times etc will be printed to console + apiKey = None): self._host = host self._lastException = None self._logRequests = logging @@ -41,7 +42,7 @@ class EventRegistry(object): # lock for making sure we make one request at a time - requests module otherwise sometimes returns incomplete json objects self._lock = threading.Lock() self._reqSession = requests.Session() - self._apiKey = None + self._apiKey = apiKey # if there is a settings.json file in the directory then try using it to login to ER # and to read the host name from it (if custom host is not specified)
Adding the apiKey parameter to the init method of the EventRegistry class. Users can pass the apiKey programmatically instead of needing to store it in a file.
py
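A usage sketch after this change — the import path is assumed from the module layout, and the key value is a placeholder:

from EventRegistry import EventRegistry  # import path assumed from module layout

er = EventRegistry(apiKey="YOUR-API-KEY")  # placeholder key
# previously the key had to live in a settings.json file next to the module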
diff --git a/ipyrad/core/assembly.py b/ipyrad/core/assembly.py index <HASH>..<HASH> 100644 --- a/ipyrad/core/assembly.py +++ b/ipyrad/core/assembly.py @@ -738,7 +738,7 @@ class Assembly(object): if not self.samples: ## try linking edits from working dir print("linked fasta files from [working_directory]/edits") - self.link_edits() + self.link_fastas() ## run clustering for all samples print("clustering {} samples on {} processors".\ format(len(self.samples), self.paramsdict["N_processors"]))
In step3 I changed the call from link_edits() to link_fastas(), since link_edits didn't exist and link_fastas seemed to do what we want.
py
diff --git a/twitter_ads/__init__.py b/twitter_ads/__init__.py index <HASH>..<HASH> 100644 --- a/twitter_ads/__init__.py +++ b/twitter_ads/__init__.py @@ -1,6 +1,6 @@ # Copyright (C) 2015 Twitter, Inc. -VERSION = (1, 2, 1) +VERSION = (1, 2, 2) from twitter_ads.utils import get_version
bump to <I> (#<I>)
py
diff --git a/salt/modules/boto_elb.py b/salt/modules/boto_elb.py index <HASH>..<HASH> 100644 --- a/salt/modules/boto_elb.py +++ b/salt/modules/boto_elb.py @@ -151,7 +151,7 @@ def create(name, availability_zones, listeners=None, subnets=None, CLI example to create an ELB:: - salt myminion boto_elb.create myelb '["us-east-1a", "us-east-1e"]' listeners='[["HTTPS", "HTTP", 443, 80, "arn:aws:iam::1111111:server-certificate/mycert"]]' region=us-east-1 + salt myminion boto_elb.create myelb '["us-east-1a", "us-east-1e"]' listeners='[[443, 80, "HTTPS", "HTTP", "arn:aws:iam::1111111:server-certificate/mycert"]]' region=us-east-1 ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
Update boto_elb.create docstring with correct syntax. The format of the listeners example was incorrect/backwards. According to the Boto documentation [1], the listeners should be in the format: LoadBalancerPortNumber, InstancePortNumber, Protocol, InstanceProtocol, and an optional SSLCertificateId. [1] <URL>
py
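As a sketch, the corrected listener ordering expressed as a direct Python call — the values are illustrative, and __salt__ is only available inside another salt module:

# values are illustrative; __salt__ exists only inside salt execution modules
listeners = [[443, 80, "HTTPS", "HTTP",
              "arn:aws:iam::1111111:server-certificate/mycert"]]
__salt__['boto_elb.create']('myelb',
                            ['us-east-1a', 'us-east-1e'],
                            listeners=listeners,
                            region='us-east-1')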
diff --git a/tests/integration/states/test_lxd.py b/tests/integration/states/test_lxd.py index <HASH>..<HASH> 100644 --- a/tests/integration/states/test_lxd.py +++ b/tests/integration/states/test_lxd.py @@ -11,7 +11,7 @@ import salt.utils.path # Import Salt Testing Libs from tests.support.unit import skipIf from tests.support.case import ModuleCase -from tests.support.helpers import destructiveTest +from tests.support.helpers import destructiveTest, flaky from tests.support.mixins import SaltReturnAssertsMixin try: @@ -29,6 +29,7 @@ class LxdTestCase(ModuleCase, SaltReturnAssertsMixin): run_once = False + @flaky def test_01__init_lxd(self): if LxdTestCase.run_once: return
mark lxd init as flaky
py
diff --git a/asciimatics/particles.py b/asciimatics/particles.py index <HASH>..<HASH> 100644 --- a/asciimatics/particles.py +++ b/asciimatics/particles.py @@ -197,6 +197,7 @@ class ParticleEffect(with_metaclass(ABCMeta, Effect)): self._y = y self._life_time = life_time self._active_systems = [] + self.reset() @abstractmethod def reset(self):
Minor fix to ensure particle effects are reset.
py