diff (stringlengths 139 to 3.65k) | message (stringlengths 8 to 627) | diff_languages (stringclasses 1) |
---|---|---|
diff --git a/intern/resource/boss/resource.py b/intern/resource/boss/resource.py
index <HASH>..<HASH> 100644
--- a/intern/resource/boss/resource.py
+++ b/intern/resource/boss/resource.py
@@ -14,7 +14,12 @@
from intern.resource import Resource
from abc import abstractmethod
+from enum import Enum
+class CacheMode(Enum):
+ cache = 'cache'
+ no_cache = 'no_cache'
+ raw = 'raw'
class BossResource(Resource):
"""Base class for Boss resources.
|
Adds CacheMode Enum class to resources
|
py
|
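A minimal standalone sketch of the Enum pattern this first diff introduces; the member names mirror the diff, everything else is illustrative:

```python
from enum import Enum

class CacheMode(Enum):
    cache = 'cache'
    no_cache = 'no_cache'
    raw = 'raw'

# Members can be looked up by value, e.g. from a query-string argument.
mode = CacheMode('no_cache')
assert mode is CacheMode.no_cache
print(mode.name, mode.value)  # -> no_cache no_cache
```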
diff --git a/quarkc/command.py b/quarkc/command.py
index <HASH>..<HASH> 100644
--- a/quarkc/command.py
+++ b/quarkc/command.py
@@ -180,7 +180,8 @@ def main(args):
else:
assert False
except compiler.QuarkError as err:
- shell.command_log.warn("")
+ if not args["run"]:
+ shell.command_log.warn("")
return err
shell.command_log.warn("Done")
|
Get rid of silly logging not configured warning
|
py
|
diff --git a/airtest/core/android/yosemite.py b/airtest/core/android/yosemite.py
index <HASH>..<HASH> 100644
--- a/airtest/core/android/yosemite.py
+++ b/airtest/core/android/yosemite.py
@@ -37,7 +37,13 @@ class Yosemite(object):
installed_version = self.adb.get_package_version(package)
LOGGING.info("local version code is {}, installed version code is {}".format(apk_version, installed_version))
if installed_version is None or apk_version > int(installed_version):
- self.adb.install_app(apk_path, replace=True, install_options=["-t", "-g"])
+ try:
+ self.adb.install_app(apk_path, replace=True, install_options=["-t", "-g"])
+ except:
+ if installed_version is None:
+ raise
+ # If the installation fails, but the phone has an old version, do not force the installation
+ LOGGING.error("Yosemite.apk update failed, please try to reinstall manually.")
@on_method_ready('install_or_upgrade')
def get_ready(self):
|
If the installation of Yosemite.apk fails, but the phone has an old version, the installation is not mandatory (cherry picked from commit 1bb<I>d4ccd5f<I>dc<I>ba<I>f5ee6f) (cherry picked from commit <I>d9eb<I>ae<I>f<I>d8ecb<I>aa<I>a<I>b<I>be4c)
|
py
|
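The Yosemite diff above wraps the install in a try/except and only swallows the failure when an old version is already on the device. A hedged standalone sketch of that fallback (the installer callable and version values are hypothetical):

```python
import logging

def install_or_keep_old(install, apk_path, installed_version):
    """Install, but tolerate failure if an older version already works."""
    try:
        install(apk_path)
    except Exception:
        if installed_version is None:
            raise  # nothing usable on the device: propagate the failure
        logging.error("update failed, keeping installed version %s",
                      installed_version)

def failing_install(path):  # stand-in for adb.install_app
    raise RuntimeError("INSTALL_FAILED")

install_or_keep_old(failing_install, "Yosemite.apk", installed_version="301")
```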
diff --git a/salt/modules/pacman.py b/salt/modules/pacman.py
index <HASH>..<HASH> 100644
--- a/salt/modules/pacman.py
+++ b/salt/modules/pacman.py
@@ -223,7 +223,7 @@ def refresh_db():
def install(name=None,
- refresh=True,
+ refresh=False,
pkgs=None,
sources=None,
**kwargs):
|
[salt] Pacman shouldn't default to refresh. Because refreshing requires you to update the entire system, installing should by default be limited to only the package you are installing.
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -25,8 +25,10 @@ on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if on_rtd:
import pip
pip.main(['install', 'sphinx_bootstrap_theme'])
+ pip.main(['install', 'recommonmark'])
import sphinx_bootstrap_theme
+from recommonmark.parser import CommonMarkParser
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -49,10 +51,14 @@ extensions = [
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
+source_parsers = {
+ '.md': CommonMarkParser,
+}
+
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
-source_suffix = '.rst'
+source_suffix = ['.rst', '.md']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
|
Enable using markdown for Sphinx docs (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,6 @@ setup(
],
extras_require={
'development': [
- 'webtest',
'Sphinx',
'repoze.sphinx.autointerface',
'flake8',
@@ -37,8 +36,6 @@ setup(
'pytest-cov',
'tox',
'mock',
- 'pyquery',
- 'mr.hermes',
'setuptools-git',
],
},
|
remove some cargo-culted dev-dependencies
|
py
|
diff --git a/ocrd/ocrd/workspace.py b/ocrd/ocrd/workspace.py
index <HASH>..<HASH> 100644
--- a/ocrd/ocrd/workspace.py
+++ b/ocrd/ocrd/workspace.py
@@ -186,6 +186,9 @@ class Workspace():
try:
ocrd_file = next(self.mets.find_files(ID=ID))
except StopIteration:
+ if ID.startswith(REGEX_PREFIX):
+ # allow empty results if filter criteria involve a regex
+ return None
raise FileNotFoundError("File %s not found in METS" % ID)
if page_recursive and ocrd_file.mimetype == MIMETYPE_PAGE:
with pushd_popd(self.directory):
|
Workspace.remove_file: no exception if empty but ID was a regex
|
py
|
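The ocrd diff above treats an empty lookup result as acceptable only when the ID is a regex filter. A sketch of that rule, assuming a "//" regex-marker prefix (the prefix value and file names are illustrative):

```python
import re

REGEX_PREFIX = "//"  # assumed marker meaning "treat ID as a regex"

def find_file(files, ID):
    if ID.startswith(REGEX_PREFIX):
        pattern = re.compile(ID[len(REGEX_PREFIX):])
        # regex criteria may legitimately match nothing
        return next((f for f in files if pattern.fullmatch(f)), None)
    try:
        return next(f for f in files if f == ID)
    except StopIteration:
        raise FileNotFoundError("File %s not found" % ID)

files = ["OCR-D-IMG_0001", "OCR-D-IMG_0002"]
assert find_file(files, "//OCR-D-GT_.*") is None
assert find_file(files, "OCR-D-IMG_0001") == "OCR-D-IMG_0001"
```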
diff --git a/web3/iban.py b/web3/iban.py
index <HASH>..<HASH> 100644
--- a/web3/iban.py
+++ b/web3/iban.py
@@ -2,9 +2,7 @@ import functools
import re
from eth_utils import (
- coerce_args_to_text,
is_string,
- pad_left,
to_checksum_address,
)
@@ -14,7 +12,7 @@ from web3.utils.validation import (
def pad_left_hex(value, num_bytes):
- return pad_left(value, num_bytes * 2, '0')
+ return value.rjust(num_bytes * 2, '0')
def iso13616Prepare(iban):
@@ -88,7 +86,6 @@ class IsValid:
return functools.partial(self.validate, instance._iban)
@staticmethod
- @coerce_args_to_text
def validate(iban_address):
if not is_string(iban_address):
return False
|
upgrade iban.py for eth-utils v1b2 compatibility
|
py
|
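The web3 diff above replaces eth_utils.pad_left with the stdlib str.rjust, which pads on the left to a fixed width. The equivalence in isolation:

```python
def pad_left_hex(value, num_bytes):
    # two hex digits per byte, zero-padded on the left
    return value.rjust(num_bytes * 2, '0')

assert pad_left_hex('4d2', 2) == '04d2'
assert pad_left_hex('abcd', 2) == 'abcd'  # already wide enough: unchanged
```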
diff --git a/src/transformers/models/auto/configuration_auto.py b/src/transformers/models/auto/configuration_auto.py
index <HASH>..<HASH> 100644
--- a/src/transformers/models/auto/configuration_auto.py
+++ b/src/transformers/models/auto/configuration_auto.py
@@ -389,7 +389,13 @@ class _LazyConfigMapping(OrderedDict):
module_name = model_type_to_module_name(key)
if module_name not in self._modules:
self._modules[module_name] = importlib.import_module(f".{module_name}", "transformers.models")
- return getattr(self._modules[module_name], value)
+ if hasattr(self._modules[module_name], value):
+ return getattr(self._modules[module_name], value)
+
+ # Some of the mappings have entries model_type -> config of another model type. In that case we try to grab the
+ # object at the top level.
+ transformers_module = importlib.import_module("transformers")
+ return getattr(transformers_module, value)
def keys(self):
return list(self._mapping.keys()) + list(self._extra_content.keys())
|
Allow the same config in the auto mapping (#<I>)
|
py
|
diff --git a/bson/son.py b/bson/son.py
index <HASH>..<HASH> 100644
--- a/bson/son.py
+++ b/bson/son.py
@@ -35,7 +35,8 @@ class SON(dict):
=================================== ============= ===================
None null both
bool boolean both
- int number (int) both
+ int [#int]_ int32 / int64 py -> bson
+ long int64 both
float number (real) both
string string py -> bson
unicode string both
@@ -56,6 +57,9 @@ class SON(dict):
`bson.binary.Binary`. Otherwise it will be saved as a BSON string
and retrieved as unicode.
+ .. [#int] A Python int will be saved as a BSON int32 or BSON int64 depending
+ on its size. A BSON int32 will always decode to a Python int. A BSON int64
+ will always decode to a Python long.
.. [#dt] datetime.datetime instances will be rounded to the nearest
millisecond when saved
.. [#dt2] all datetime.datetime instances are treated as *naive*. clients
|
Clarify int/long encode/decode behavior PYTHON-<I>
|
py
|
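A small sketch of the int32/int64 boundary the bson docstring above documents: a Python int that fits in 4 signed bytes is stored as BSON int32, larger values as int64 (the helper name is illustrative):

```python
def bson_int_type(n):
    if -2**31 <= n < 2**31:
        return 'int32'
    if -2**63 <= n < 2**63:
        return 'int64'
    raise OverflowError('too large for an 8-byte BSON int')

assert bson_int_type(1) == 'int32'
assert bson_int_type(2**40) == 'int64'
```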
diff --git a/src/toil/utils/toilStatus.py b/src/toil/utils/toilStatus.py
index <HASH>..<HASH> 100644
--- a/src/toil/utils/toilStatus.py
+++ b/src/toil/utils/toilStatus.py
@@ -82,10 +82,12 @@ def main():
sys.exit(0)
toilState = ToilState(jobStore, rootJob )
-
- failedJobs = [ job for job in toilState.updatedJobs | \
- set(toilState.successorCounts.keys()) \
- if job.remainingRetryCount == 0 ]
+
+ # The first element of the toilState.updatedJobs tuple is the jobWrapper we want to inspect
+ totalJobs = set(toilState.successorCounts.keys()) | \
+ {jobTuple[0] for jobTuple in toilState.updatedJobs}
+
+ failedJobs = [ job for job in totalJobs if job.remainingRetryCount == 0 ]
print "There are %i active jobs, %i parent jobs with children, and \
%i totally failed jobs currently in toil workflow: %s" % \
|
Fix attribute error in toil stats for incomplete runs (resolves #<I>)
|
py
|
diff --git a/dvc/version.py b/dvc/version.py
index <HASH>..<HASH> 100644
--- a/dvc/version.py
+++ b/dvc/version.py
@@ -7,7 +7,7 @@ import os
import subprocess
-_BASE_VERSION = "0.82.4"
+_BASE_VERSION = "0.82.5"
def _generate_version(base_version):
|
dvc: bump to <I>
|
py
|
diff --git a/spyder/utils/site/sitecustomize.py b/spyder/utils/site/sitecustomize.py
index <HASH>..<HASH> 100644
--- a/spyder/utils/site/sitecustomize.py
+++ b/spyder/utils/site/sitecustomize.py
@@ -280,13 +280,6 @@ class IPyTesProgram(TestProgram):
TestProgram.__init__(self, *args, **kwargs)
unittest.main = IPyTesProgram
-# Filter warnings that appear for ipykernel when interacting with
-# the Variable explorer (i.e trying to see a variable)
-# Fixes Issue 5591
-warnings.filterwarnings(action='ignore', category=DeprecationWarning,
- module='ipykernel.datapub',
- message=".*ipykernel.datapub is deprecated.*")
-
#==============================================================================
# Pandas adjustments
|
Sitecustomize: Remove code to filter warning in ipykernel.datapub
|
py
|
diff --git a/caravel/viz.py b/caravel/viz.py
index <HASH>..<HASH> 100755
--- a/caravel/viz.py
+++ b/caravel/viz.py
@@ -1304,9 +1304,8 @@ class SunburstViz(BaseViz):
metric = self.form_data.get('metric')
secondary_metric = self.form_data.get('secondary_metric')
if metric == secondary_metric:
- ndf = df[cols]
- ndf['m1'] = df[metric]
- ndf['m2'] = df[metric]
+ ndf = df
+ ndf.columns = [cols + ['m1', 'm2']]
else:
cols += [
self.form_data['metric'], self.form_data['secondary_metric']]
|
viz: make sunburst work again (#<I>), by using a different method for renaming the metric columns
|
py
|
diff --git a/zk_shell/xcmd.py b/zk_shell/xcmd.py
index <HASH>..<HASH> 100644
--- a/zk_shell/xcmd.py
+++ b/zk_shell/xcmd.py
@@ -141,7 +141,9 @@ def ensure_params_with_parser(parser, func):
params = parser.parse_args(shlex.split(args[1]))
return func(args[0], params)
except (ShellParser.ParserException, ValueError) as ex:
- print(ex)
+ doc = getattr(func, "__doc__", None)
+ cmd = func.__name__.replace("do_", "")
+ print("\n%s\n\n%s: %s" % (ex, cmd, doc) if doc else ex)
return wrapper
|
Show help when a command has missing (or wrong) parameters
|
py
|
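The zk_shell diff above prints a command's docstring as usage help when parsing fails. The same idea as a self-contained decorator (the command and error are made up for the demo):

```python
def with_usage_help(func):
    def wrapper(*args):
        try:
            return func(*args)
        except ValueError as ex:
            doc = getattr(func, "__doc__", None)
            cmd = func.__name__.replace("do_", "")
            print("\n%s\n\n%s: %s" % (ex, cmd, doc) if doc else ex)
    return wrapper

@with_usage_help
def do_create(args):
    """create <path> <value> -- creates a znode"""
    raise ValueError("expected 2 arguments")

do_create("/a")  # prints the error, then "create: create <path> <value> ..."
```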
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -20,7 +20,7 @@ f.close()
setup(
name='django-cron',
- version='0.2.1',
+ version='0.2.2',
author='Sumit Chachra',
author_email='[email protected]',
url='http://github.com/tivix/django-cron',
@@ -32,7 +32,7 @@ setup(
install_requires=[
'Django>=1.2.3',
'South>=0.7.2',
- 'django-common>=0.1'
+ 'django-common-helpers>=0.3'
],
test_suite = 'django_cron.tests',
include_package_data=True,
|
Fixed and put django-common-helpers>=<I>. Bumped up the version also.
|
py
|
diff --git a/examples/yoti_example_flask/app.py b/examples/yoti_example_flask/app.py
index <HASH>..<HASH> 100644
--- a/examples/yoti_example_flask/app.py
+++ b/examples/yoti_example_flask/app.py
@@ -37,7 +37,9 @@ def auth():
activity_details = client.get_activity_details(request.args['token'])
user_profile = activity_details.user_profile
user_profile['base64_selfie_uri'] = getattr(activity_details, 'base64_selfie_uri')
- save_image(user_profile.get('selfie'))
+ selfie = user_profile.get('selfie')
+ if selfie is not None:
+ save_image(selfie)
return render_template('profile.html',
**user_profile)
|
[SDK-<I>]: Added check for when selfie is not present
|
py
|
diff --git a/tinyscript/helpers/path.py b/tinyscript/helpers/path.py
index <HASH>..<HASH> 100644
--- a/tinyscript/helpers/path.py
+++ b/tinyscript/helpers/path.py
@@ -8,7 +8,7 @@ from shutil import rmtree
from six import string_types
from tempfile import gettempdir, NamedTemporaryFile as TempFile
-from .utils import LINUX, DARWIN, PYTHON3, WINDOWS
+from .utils import u, LINUX, DARWIN, PYTHON3, WINDOWS
__all__ = __features__ = ["Path", "TempPath"]
@@ -72,7 +72,7 @@ class Path(BasePath):
raise TypeError("data must be str, not %s" %
data.__class__.__name__)
with self.open(mode=mode, encoding=encoding, errors=errors) as f:
- return f.write(data)
+ return f.write(u(data))
def append_bytes(self, data):
""" Allows to append bytes to the file, as only write_bytes is available
|
Fixed Python2-related bug in helpers.path
|
py
|
diff --git a/qiskit/circuit/quantumcircuit.py b/qiskit/circuit/quantumcircuit.py
index <HASH>..<HASH> 100644
--- a/qiskit/circuit/quantumcircuit.py
+++ b/qiskit/circuit/quantumcircuit.py
@@ -2016,10 +2016,11 @@ class QuantumCircuit:
if isinstance(parameters, dict):
# unroll the parameter dictionary (needed if e.g. it contains a ParameterVector)
unrolled_param_dict = self._unroll_param_dict(parameters)
+ unsorted_parameters = self._unsorted_parameters()
# check that all param_dict items are in the _parameter_table for this circuit
params_not_in_circuit = [param_key for param_key in unrolled_param_dict
- if param_key not in self._unsorted_parameters()]
+ if param_key not in unsorted_parameters]
if len(params_not_in_circuit) > 0:
raise CircuitError('Cannot bind parameters ({}) not present in the circuit.'.format(
', '.join(map(str, params_not_in_circuit))))
|
put call to unsorted params outside of loop (#<I>)
|
py
|
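The qiskit diff above hoists the `_unsorted_parameters()` call out of the comprehension so it runs once instead of once per key. The generic pattern (names hypothetical):

```python
def expensive_lookup():
    print("computed once")
    return {"theta", "phi"}

keys = ["theta", "gamma"]

# Before: expensive_lookup() would run for every key in the comprehension.
# After: compute it once and reuse the result.
valid = expensive_lookup()
missing = [k for k in keys if k not in valid]
print(missing)  # -> ['gamma']
```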
diff --git a/asv/commands/run.py b/asv/commands/run.py
index <HASH>..<HASH> 100644
--- a/asv/commands/run.py
+++ b/asv/commands/run.py
@@ -198,7 +198,7 @@ class Run(Command):
commit_hashes = repo.get_new_branch_commits(conf.branches, [])
elif isinstance(range_spec, six.string_types) and range_spec.startswith('HASHFILE:'):
hashfn = range_spec[9:]
- if hashfn == '-' or hashfn.lower() == 'stdin':
+ if hashfn == '-':
hashstr = sys.stdin.read()
elif os.path.isfile(hashfn):
with open(hashfn, 'r') as f:
@@ -206,7 +206,7 @@ class Run(Command):
else:
log.error('Requested commit hash file "{}" is not a file'.format(hashfn))
return 1
- commit_hashes = hashstr.strip().split('\n')
+ commit_hashes = [h.strip() for h in hashstr.split("\n") if h.strip()]
elif isinstance(range_spec, list):
commit_hashes = range_spec
else:
|
address inline suggestions from @pv's review
|
py
|
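The asv diff above makes hash-file parsing tolerant of blank and padded lines. In isolation:

```python
hashstr = "abc123\n  def456  \n\n"

# Strip each entry and drop blanks, so trailing newlines or padded
# lines never produce empty commit hashes.
commit_hashes = [h.strip() for h in hashstr.split("\n") if h.strip()]
assert commit_hashes == ["abc123", "def456"]
```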
diff --git a/squad/frontend/tests.py b/squad/frontend/tests.py
index <HASH>..<HASH> 100644
--- a/squad/frontend/tests.py
+++ b/squad/frontend/tests.py
@@ -83,19 +83,20 @@ class TestResultTable(list):
suite_id,
name,
SUM(CASE when result is null then 1 else 0 end) as skips,
- SUM(CASE when result is null then 0 when result then 0 else 1 end) as fails,
+ SUM(CASE when result is not null and not result and not has_known_issues then 1 else 0 end) as fails,
+ SUM(CASE when result is not null and not result and has_known_issues then 1 else 0 end) as xfails,
SUM(CASE when result is null then 0 when result then 1 else 0 end) as passes
FROM core_test
JOIN core_testrun ON core_testrun.id = core_test.test_run_id
WHERE core_testrun.build_id = %s
GROUP BY suite_id, name
- ORDER BY fails DESC, skips DESC, passes DESC, suite_id, name
+ ORDER BY fails DESC, xfails DESC, skips DESC, passes DESC, suite_id, name
LIMIT %s
OFFSET %s
"""
with connection.cursor() as cursor:
cursor.execute(query, [build_id, per_page, offset])
- for suite_id, name, _, _, _ in cursor.fetchall():
+ for suite_id, name, _, _, _, _ in cursor.fetchall():
conditions.setdefault(suite_id, [])
conditions[suite_id].append(name)
|
frontend: "All tests": fix sorting in the presence of xfail Tests are ordered by decreasing number of fails, xfails, skips, and passes.
|
py
|
diff --git a/flask_venom/__init__.py b/flask_venom/__init__.py
index <HASH>..<HASH> 100644
--- a/flask_venom/__init__.py
+++ b/flask_venom/__init__.py
@@ -78,6 +78,7 @@ class Venom(venom.rpc.Venom):
self.blueprint = app
else:
self._init_app(app)
+ self.app = app
def _deferred_blueprint_init(self, setup_state: 'flask.blueprints.BlueprintSetupState'):
self._init_app(setup_state.app, url_prefix=setup_state.url_prefix)
|
Fix flask_venom.Venom not recording routes in some cases
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -83,8 +83,8 @@ setup(name=constants.NAME,
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
- 'Programming Language :: Python :: 3.6'
- 'Programming Language :: Python :: 3.7'
+ 'Programming Language :: Python :: 3.6',
+ 'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8'],
packages=['sakelib'],
scripts=['sake'],
|
small fix with commas in setup.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,13 +7,13 @@ with open('README.txt') as file:
setup(name='MinimalModbus',
version='0.1',
- description='Simple Modbus RTU implementation',
- long_description = long_description
+ description='Simple Modbus RTU implementation for Python',
+ long_description = long_description,
author='Jonas Berg',
- author_email='pyhys@ ???++',
+ author_email='[email protected]',
url='http://sourceforge.net/projects/minimalmodbus/',
- modules=['minimalmodbus', 'eurotherm3504'],
- licence = 'Apache License, Version 2.0',
+ py_modules=['minimalmodbus', 'eurotherm3500'],
+ license = 'Apache License, Version 2.0',
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
|
Improved the setup.py script
|
py
|
diff --git a/windpowerlib/wind_turbine_cluster.py b/windpowerlib/wind_turbine_cluster.py
index <HASH>..<HASH> 100644
--- a/windpowerlib/wind_turbine_cluster.py
+++ b/windpowerlib/wind_turbine_cluster.py
@@ -101,8 +101,8 @@ class WindTurbineCluster(object):
"""
return sum(wind_farm.installed_power for wind_farm in self.wind_farms)
- def assign_power_curve(self, wake_losses_model='wind_efficiency_curve',
- smoothing=True, block_width=0.5,
+ def assign_power_curve(self, wake_losses_model='power_efficiency_curve',
+ smoothing=False, block_width=0.5,
standard_deviation_method='turbulence_intensity',
smoothing_order='wind_farm_power_curves',
turbulence_intensity=None, **kwargs):
|
Set smoothing to False as default
|
py
|
diff --git a/salt/returners/mongo_return.py b/salt/returners/mongo_return.py
index <HASH>..<HASH> 100644
--- a/salt/returners/mongo_return.py
+++ b/salt/returners/mongo_return.py
@@ -2,10 +2,17 @@
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsystem
+
+Required python modules: pymongo
'''
import logging
-import pymongo
+
+try:
+ import pymongo
+ has_pymongo = True
+except ImportError:
+ has_pymongo = False
log = logging.getLogger(__name__)
@@ -17,6 +24,12 @@ __opts__ = {'mongo.db': 'salt',
'mongo.user': ''}
+def __virtual__():
+ if not has_pymongo:
+ return False
+ return 'mongo_return'
+
+
def returner(ret):
'''
Return data to a mongodb server
|
Don't complain about missing pymongo
|
py
|
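The salt returner diff above is the classic optional-dependency pattern: record availability at import time, then gate module loading on the flag. A standalone sketch:

```python
try:
    import pymongo
    HAS_PYMONGO = True
except ImportError:
    HAS_PYMONGO = False

def __virtual__():
    # Salt loads the returner only when this returns a truthy name.
    if not HAS_PYMONGO:
        return False
    return 'mongo_return'
```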
diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py
index <HASH>..<HASH> 100644
--- a/salt/states/ansiblegate.py
+++ b/salt/states/ansiblegate.py
@@ -67,7 +67,7 @@ class AnsibleState(object):
for mod_name, mod_params in kwargs.items():
args, kwargs = self.get_args(mod_params)
try:
- ans_mod_out = __salt__['ansible.{0}'.format(mod_name)](*args, **kwargs)
+ ans_mod_out = __salt__['ansible.{0}'.format(mod_name)](**{'__pub_arg': [args, kwargs]})
except Exception as err:
ans_mod_out = 'Module "{0}" failed. Error message: ({1}) {2}'.format(
mod_name, err.__class__.__name__, err)
|
Mimic the direct call of the module to reuse internal arg parsing
|
py
|
diff --git a/core/eolearn/core/eonode.py b/core/eolearn/core/eonode.py
index <HASH>..<HASH> 100644
--- a/core/eolearn/core/eonode.py
+++ b/core/eolearn/core/eonode.py
@@ -42,7 +42,7 @@ class EONode:
if self.name is None:
super().__setattr__('name', self.task.__class__.__name__)
- super().__setattr__('uid', generate_uid(self.name))
+ super().__setattr__('uid', generate_uid(self.task.__class__.__name__))
def get_custom_name(self, number=0):
""" Provides custom node name according to the class of the contained task and a given number
|
node uid generated from task class name, not custom name
|
py
|
diff --git a/drf_fsm_transitions/viewset_mixins.py b/drf_fsm_transitions/viewset_mixins.py
index <HASH>..<HASH> 100644
--- a/drf_fsm_transitions/viewset_mixins.py
+++ b/drf_fsm_transitions/viewset_mixins.py
@@ -13,7 +13,7 @@ def get_transition_viewset_method(transition_name):
transition_method(by=self.request.user)
- if self.save_post_transition:
+ if self.save_after_transition:
object.save()
serializer = self.get_serializer(object)
@@ -31,7 +31,7 @@ def get_viewset_transition_action_mixin(model):
instance = model()
class Mixin(object):
- save_post_transition = True
+ save_after_transition = True
transitions = instance.get_all_status_transitions()
transition_names = set(x.name for x in transitions)
|
switch to save_after_transition
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -138,9 +138,6 @@ setup(
('share/man/man7',
['doc/man/salt.7',
]),
- (mod_path,
- ['salt/modules/cytest.pyx',
- ]),
(doc_path,
[
]),
|
remove cytest from setup.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ with open('README.rst') as file:
setup(
name = 'fritzconnection',
- version = '0.8.2',
+ version = '0.8.3',
packages = find_packages(),
license = 'MIT',
description = 'Communicate with the AVM FritzBox',
|
setup.py also adapted to <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -33,7 +33,7 @@ if __name__ == "__main__":
url="https://github.com/Salamek/cron-descriptor",
long_description=long_description,
long_description_content_type='text/markdown',
- packages=setuptools.find_packages(),
+ packages=setuptools.find_packages(exclude=['tests*',]),
package_data={
'cron_descriptor': [
'locale/*.mo',
|
Removed tests package from installation. This is fairly standard, and in fact some build systems will fail builds that try to install tests packages.
|
py
|
diff --git a/lib/emir/recipes/aiv/mask.py b/lib/emir/recipes/aiv/mask.py
index <HASH>..<HASH> 100644
--- a/lib/emir/recipes/aiv/mask.py
+++ b/lib/emir/recipes/aiv/mask.py
@@ -201,6 +201,9 @@ def compute_fwhm_global(data, center, box):
except ValueError as error:
_logger.warning("%s", error)
return center[1], center[0], -99.0, -99.0, -99.0
+ except Exception as error:
+ _logger.warning("%s", error)
+ return center[1], center[0], -199.0, -199.0, -199.0
# returns x,y
|
Catch more errors in fwhm
|
py
|
diff --git a/perft.py b/perft.py
index <HASH>..<HASH> 100755
--- a/perft.py
+++ b/perft.py
@@ -152,7 +152,6 @@ class PerftTestCase(unittest.TestCase):
self.assertEqual(perft(board, 2), 628)
self.assertEqual(perft(board, 3), 12858)
self.assertEqual(perft(board, 4), 405636)
- self.assertEqual(perft(board, 5), 8992652)
sys.stdout.write(".\n")
sys.stdout.flush()
|
Pypy on Travis CI can not make perft 5 in <I> minutes
|
py
|
diff --git a/examples/receive.py b/examples/receive.py
index <HASH>..<HASH> 100644
--- a/examples/receive.py
+++ b/examples/receive.py
@@ -5,7 +5,16 @@ import notifs
def print_notification(title, message):
- print "Notification received: {0}: {1}".format(title, message)
+ print "Print notification received: {0}: {1}".format(title, message)
+
+def web_app_notify(title, message):
+ print "Webapp notification received: {0}: {1}".format(title, message)
+
+def iphone_app_notify(title, message):
+ print "iPhone App notification received: {0}: {1}".format(title, message)
+
+def android_app_notify(title, message):
+ print "Android App notification received: {0}: {1}".format(title, message)
try:
n = notifs.Notifs("amqps://user:[email protected]:5673/%2F")
@@ -15,6 +24,9 @@ try:
# This will make it easy to choose which channels your clients
# will receive. Can be a list too, for listening on multiple streams.
n.receive("test_channel", print_notification)
+ n.receive("test_channel", web_app_notify)
+ n.receive("test_channel", iphone_app_notify)
+ n.receive("test_channel", android_app_notify)
except KeyboardInterrupt:
break
|
Expand example This makes it more clear how to add more queues ("channels") to the exchange to receive copies of the message.
|
py
|
diff --git a/src/metpy/calc/indices.py b/src/metpy/calc/indices.py
index <HASH>..<HASH> 100644
--- a/src/metpy/calc/indices.py
+++ b/src/metpy/calc/indices.py
@@ -126,20 +126,15 @@ def mean_pressure_weighted(pressure, *args, height=None, bottom=None, depth=None
Renamed ``heights`` parameter to ``height``
"""
- ret = [] # Returned variable means in layer
- layer_arg = get_layer(pressure, *args, height=height,
- bottom=bottom, depth=depth)
- layer_p = layer_arg[0]
- layer_arg = layer_arg[1:]
+ # Split pressure profile from other variables to average
+ pres_prof, *others = get_layer(pressure, *args, height=height, bottom=bottom, depth=depth)
+
# Taking the integral of the weights (pressure) to feed into the weighting
# function. Said integral works out to this function:
- pres_int = 0.5 * (layer_p[-1]**2 - layer_p[0]**2)
- for i, _datavar in enumerate(args):
- arg_mean = np.trapz((layer_arg[i] * layer_p),
- x=layer_p) / pres_int
- ret.append(arg_mean)
+ pres_int = 0.5 * (pres_prof[-1] ** 2 - pres_prof[0] ** 2)
- return ret
+ # Perform integration on the profile for each variable
+ return [np.trapz(var_prof * pres_prof, x=pres_prof) / pres_int for var_prof in others]
@exporter.export
|
Refactor mean_pressure_weighted() Replaced loop using `enumerate` with a list comprehension, plus some general Python code cleanup.
|
py
|
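A hedged standalone version of the refactored computation in the MetPy diff above: weight each profile variable by pressure and integrate with np.trapz (the sample values are made up):

```python
import numpy as np

def pressure_weighted_means(pressure, *profiles):
    """Pressure-weighted layer mean of each profile variable."""
    # Integral of the weights works out to 0.5 * (p_last**2 - p_first**2)
    pres_int = 0.5 * (pressure[-1] ** 2 - pressure[0] ** 2)
    return [np.trapz(var * pressure, x=pressure) / pres_int for var in profiles]

p = np.array([1000.0, 900.0, 800.0])  # hPa
u = np.array([5.0, 10.0, 15.0])       # m/s
print(pressure_weighted_means(p, u))  # ~9.72, weighted toward high pressure
```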
diff --git a/condoor/version.py b/condoor/version.py
index <HASH>..<HASH> 100644
--- a/condoor/version.py
+++ b/condoor/version.py
@@ -1,3 +1,3 @@
"""Version information."""
-__version__ = '1.0.16'
+__version__ = '1.0.17'
|
Bumping version number to <I>
|
py
|
diff --git a/octodns/provider/azuredns.py b/octodns/provider/azuredns.py
index <HASH>..<HASH> 100644
--- a/octodns/provider/azuredns.py
+++ b/octodns/provider/azuredns.py
@@ -337,6 +337,8 @@ class AzureProvider(BaseProvider):
)
return self._dns_client_handle
+ def _set_dns_client(self, client):
+ self._dns_client_handle = client
def __init__(self, id, client_id, key, directory_id, sub_id,
@@ -352,7 +354,7 @@ class AzureProvider(BaseProvider):
self._dns_client_key = key
self._dns_client_directory_id = directory_id
self._dns_client_subscription_id = sub_id
- self._dns_client = property(_get_dns_client)
+ self._dns_client = property(_get_dns_client, _set_dns_client)
self._resource_group = resource_group
self._azure_zones = set()
|
Add set DNS client logic if needed for testing
|
py
|
diff --git a/examples/ccxt.pro/py/binance-watch-spot-futures-balances-continuously.py b/examples/ccxt.pro/py/binance-watch-spot-futures-balances-continuously.py
index <HASH>..<HASH> 100644
--- a/examples/ccxt.pro/py/binance-watch-spot-futures-balances-continuously.py
+++ b/examples/ccxt.pro/py/binance-watch-spot-futures-balances-continuously.py
@@ -25,7 +25,7 @@ async def print_balance_continuously(exchange):
except Exception as e:
print('-----------------------------------------------------------')
print(exchange.iso8601(exchange.milliseconds()), exchange.id, type(e), e)
- await exchange.sleep (300000) # sleep 5 minutes and retry
+ await exchange.sleep(300000) # sleep 5 minutes and retry
async def main():
|
examples/ccxt.pro/py/binance-watch-spot-futures-balances-continuously.py linting
|
py
|
diff --git a/tests/test_ndict.py b/tests/test_ndict.py
index <HASH>..<HASH> 100644
--- a/tests/test_ndict.py
+++ b/tests/test_ndict.py
@@ -21,6 +21,12 @@ def test_ndict():
assert 'x' in nd
assert 'y' not in nd
+ # Mutability
+ nd2 = ndict({'x': 1})
+ assert nd2.x == 1
+ nd2.x = 2
+ assert nd2.x == 2
+
# Class isolation
assert '__init__' not in nd
assert '__iter__' not in nd
|
test mutability of ndict
|
py
|
diff --git a/apitools/base/py/list_pager_test.py b/apitools/base/py/list_pager_test.py
index <HASH>..<HASH> 100644
--- a/apitools/base/py/list_pager_test.py
+++ b/apitools/base/py/list_pager_test.py
@@ -192,7 +192,6 @@ class ListPagerTest(unittest2.TestCase):
self._AssertInstanceSequence(results, 5)
-
def testYieldFromListEmpty(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
|
Delete extra line from list_pager_test.
|
py
|
diff --git a/pylas/lasdatas/base.py b/pylas/lasdatas/base.py
index <HASH>..<HASH> 100644
--- a/pylas/lasdatas/base.py
+++ b/pylas/lasdatas/base.py
@@ -362,6 +362,8 @@ class LasBase(object):
self.write_to(destination, do_compress=do_compress)
def _compress_with_laszip_executable(self, out_stream):
+ if self.vlrs.get("ExtraBytesVlr"):
+ raise errors.LazError("Compressing LAS that has extra bytes is not supported with LASzip")
try:
out_stream.fileno()
except OSError:
|
raise exception when trying to compress LAS with extra bytes using laszip. I don't know why yet, but it produces invalid laz files
|
py
|
diff --git a/genepattern/taskwidget.py b/genepattern/taskwidget.py
index <HASH>..<HASH> 100644
--- a/genepattern/taskwidget.py
+++ b/genepattern/taskwidget.py
@@ -1,7 +1,6 @@
import inspect
import json
import os
-import tempfile
from urllib.request import Request, urlopen
from urllib.error import HTTPError
from gp import GPTask
@@ -100,11 +99,10 @@ class GPTaskWidget(UIBuilder):
def genepattern_upload_callback(values):
try:
for k in values:
- with tempfile.NamedTemporaryFile() as f:
- f.write(values[k]['content'])
- f.flush()
- gpfile = self.task.server_data.upload_file(k, os.path.realpath(f.name))
- return gpfile.get_url()
+ path = os.path.realpath(k)
+ gpfile = self.task.server_data.upload_file(k, path)
+ os.remove(path)
+ return gpfile.get_url()
except Exception as e:
self.error = f"Error encountered uploading file: {e}"
return genepattern_upload_callback
|
Updated GP upload callback to support ipyuploads widget
|
py
|
diff --git a/ndb/__init__.py b/ndb/__init__.py
index <HASH>..<HASH> 100644
--- a/ndb/__init__.py
+++ b/ndb/__init__.py
@@ -1,6 +1,6 @@
"""NDB -- A new datastore API for the Google App Engine Python runtime."""
-__version__ = '1.0.3+'
+__version__ = '1.0.4'
__all__ = []
|
Set version to <I> (even though things may still change).
|
py
|
diff --git a/udata/core/user/models.py b/udata/core/user/models.py
index <HASH>..<HASH> 100644
--- a/udata/core/user/models.py
+++ b/udata/core/user/models.py
@@ -223,6 +223,7 @@ class User(WithMetrics, UserMixin, db.Document):
self.avatar_url = None
self.website = None
self.about = None
+ self.extras = None
self.deleted = datetime.now()
self.save()
for organization in self.organizations:
|
Clean user extras field on user delete
|
py
|
diff --git a/openquake/calculators/extract.py b/openquake/calculators/extract.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/extract.py
+++ b/openquake/calculators/extract.py
@@ -733,3 +733,17 @@ def extract_event_info(dstore, eidx):
yield key, val
yield 'rlzi', rlzi
yield 'gsim', repr(gsim)
+
+
[email protected]('ruptures_within')
+def get_ruptures_within(dstore, bbox):
+ """
+ :param dstore: a DataStore instance
+ :param bbox: a string minlon,minlat,maxlon,maxlat
+ :returns: rup_array within the bounding box
+ """
+ minlon, minlat, maxlon, maxlat = map(float, bbox.split(','))
+ hypo = dstore['ruptures']['hypo'].T # shape (3, N)
+ mask = ((minlon <= hypo[0]) * (minlat <= hypo[1]) *
+ (maxlon >= hypo[0]) * (maxlat >= hypo[1]))
+ return dstore['ruptures'][mask]
|
Added extract/ruptures_within [skip CI] Former-commit-id: <I>feb4afe4adcb1fab<I>c<I>ff3d<I>f7da<I>f
|
py
|
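The openquake extractor above builds a boolean mask over hypocenter coordinates. The same filter on a toy array (data and bounding box are made up; the diff's `*` on boolean arrays is an elementwise AND, written `&` here):

```python
import numpy as np

hypo = np.array([[10.0, 45.0, 5.0],    # lon, lat, depth per rupture
                 [12.5, 46.0, 8.0],
                 [20.0, 50.0, 3.0]]).T  # shape (3, N), as in the diff

minlon, minlat, maxlon, maxlat = map(float, "9,44,13,47".split(","))
mask = ((minlon <= hypo[0]) & (minlat <= hypo[1]) &
        (maxlon >= hypo[0]) & (maxlat >= hypo[1]))
print(mask)  # -> [ True  True False]
```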
diff --git a/libusb1.py b/libusb1.py
index <HASH>..<HASH> 100644
--- a/libusb1.py
+++ b/libusb1.py
@@ -703,16 +703,16 @@ except AttributeError:
else:
libusb_error_name.argtypes = [c_int]
libusb_error_name.restype = c_char_p
-try:
- #char *libusb_strerror(enum libusb_error errcode);
- libusb_strerror = libusb.libusb_strerror
-except AttributeError:
- # Place holder
- def libusb_strerror(errcode):
- return None
-else:
- libusb_strerror.argtypes = [c_int]
- libusb_strerror.restype = c_char_p
+
+# Note on libusb_strerror, libusb_setlocale and future functions in the
+# same spirit:
+# I do not think end-user-facing messages belong to a technical library.
+# Such features bring a new, non essential set of problems, and is a luxury
+# I do not want to spend time supporting considering limited resources and
+# more important stuff to work on.
+# For backward compatibility, expose libusb_strerror placeholder.
+def libusb_strerror(errcode):
+ return None
#ssize_t libusb_get_device_list(libusb_context *ctx,
# libusb_device ***list);
|
Replace libusb_strerror binding with a fallback. The objective in dropping the existing wrapper for this function is to not mislead users into thinking support will be added for such functions.
|
py
|
diff --git a/salt/modules/cloud.py b/salt/modules/cloud.py
index <HASH>..<HASH> 100644
--- a/salt/modules/cloud.py
+++ b/salt/modules/cloud.py
@@ -8,7 +8,12 @@ import os
import logging
# Import salt libs
-import salt.cloud
+try:
+ import salt.cloud
+ HAS_SALTCLOUD = True
+except ImportError:
+ HAS_SALTCLOUD = False
+
import salt.utils
log = logging.getLogger(__name__)
@@ -22,9 +27,9 @@ def __virtual__():
'''
Only work on POSIX-like systems
'''
- if salt.utils.is_windows():
- return False
- return True
+ if HAS_SALTCLOUD:
+ return True
+ return False
def _get_client():
|
don't load salt cloud if it won't import
|
py
|
diff --git a/pyensembl/download_cache.py b/pyensembl/download_cache.py
index <HASH>..<HASH> 100644
--- a/pyensembl/download_cache.py
+++ b/pyensembl/download_cache.py
@@ -163,9 +163,9 @@ class DownloadCache(object):
the result of downloading to be a decompressed file)
"""
for ext in [".gz", ".gzip", ".zip"]:
- if local_filename.endswith(ext):
- return local_filename[:-len(ext)]
- return local_filename
+ if filename.endswith(ext):
+ return filename[:-len(ext)]
+ return filename
def cached_path(self, path_or_url):
|
changed local_filename->filename in download_cache compression helper
|
py
|
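The pyensembl fix above is a rename inside a suffix-stripping helper. The helper itself, runnable on its own:

```python
def decompressed_name(filename):
    # Strip a known compression suffix; first match wins.
    for ext in [".gz", ".gzip", ".zip"]:
        if filename.endswith(ext):
            return filename[:-len(ext)]
    return filename

assert decompressed_name("Homo_sapiens.gtf.gz") == "Homo_sapiens.gtf"
assert decompressed_name("plain.txt") == "plain.txt"
```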
diff --git a/tests/unit/topotools/TopotoolsTest.py b/tests/unit/topotools/TopotoolsTest.py
index <HASH>..<HASH> 100644
--- a/tests/unit/topotools/TopotoolsTest.py
+++ b/tests/unit/topotools/TopotoolsTest.py
@@ -113,12 +113,11 @@ class TopotoolsTest:
net1['pore.test2'] = 10
net2['pore.test3'] = True
net2['pore.test4'] = 10.0
- with pytest.warns(UserWarning):
- topotools.merge_networks(net1, net2)
+ topotools.merge_networks(net1, net2)
assert np.sum(net1['pore.test1']) == 27
assert np.sum(net1['pore.test3']) == 27
- assert np.sum(net1['pore.test2']) == 270
- assert np.sum(np.isnan(net1['pore.test4'])) == 27
+ assert np.sum(net1['pore.test2'][:27]) == 270
+ assert np.sum(net1['pore.test4'][27:]) == 270
if __name__ == '__main__':
|
fixing the tests for merge_networks
|
py
|
diff --git a/gaugetest.py b/gaugetest.py
index <HASH>..<HASH> 100644
--- a/gaugetest.py
+++ b/gaugetest.py
@@ -538,6 +538,9 @@ def test_boundary():
floor = Boundary(iter([zero_seg]), operator.gt)
assert ceil.best is min
assert floor.best is max
+ # repr
+ assert repr(ceil) == ('<Boundary seg={0}, cmp=<built-in function lt>>'
+ ''.format(zero_seg))
@pytest.fixture
|
test coverage = <I>%
|
py
|
diff --git a/bokeh/pyplot.py b/bokeh/pyplot.py
index <HASH>..<HASH> 100644
--- a/bokeh/pyplot.py
+++ b/bokeh/pyplot.py
@@ -60,6 +60,7 @@ def show_bokeh(figure=None, filename=None, server=None, notebook=False):
for axes in figure.axes:
plot = mplsupport.axes2plot(axes)
+ plotting._config["curplot"] = plot # need a better way to do this
session.plotcontext.children.append(plot)
# TODO: this should be obviated once Mateusz's auto-add PR is merged
objects = [plot, plot.x_range, plot.y_range] + plot.data_sources + plot.renderers + \
|
Small hack to pyplot.py to set 'curplot' in bokeh.plotting. (Needed by show() in some cases)
|
py
|
diff --git a/pgextras/sql_constants.py b/pgextras/sql_constants.py
index <HASH>..<HASH> 100644
--- a/pgextras/sql_constants.py
+++ b/pgextras/sql_constants.py
@@ -424,16 +424,6 @@ TABLE_INDEXES_SIZE = """
ORDER BY pg_indexes_size(c.oid) DESC;
"""
-PG_STATS_NOT_AVAILABLE = """
- pg_stat_statements extension needs to be installed in the
- public schema first. This extension is only available on
- Postgres versions 9.2 or greater. You can install it by
- adding pg_stat_statements to shared_preload_libraries in
- postgresql.conf, restarting postgres and then running the
- following sql statement in your database:
- CREATE EXTENSION pg_stat_statements;
-"""
-
PS = """
SELECT
{pid_column},
|
This was an exception message that was moved to the actual exception.
|
py
|
diff --git a/ccy/core/country.py b/ccy/core/country.py
index <HASH>..<HASH> 100644
--- a/ccy/core/country.py
+++ b/ccy/core/country.py
@@ -12,7 +12,7 @@ __all__ = ['country', 'countryccy', 'set_new_country',
# using ISO 3166-1 alpha-2 country codes
# see http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
#
-eurozone = tuple(('AT BE CY DE EE ES FI FR GR IE IT LU LV MT '
+eurozone = tuple(('AT BE CY DE EE ES FI FR GR IE IT LU LV LT MT '
'NL PT SI SK').split(' '))
|
Added Lithuania to eurozone countries list. Lithuania is a Eurozone country as of <I>-<I>-<I>
|
py
|
diff --git a/openid/store/filestore.py b/openid/store/filestore.py
index <HASH>..<HASH> 100644
--- a/openid/store/filestore.py
+++ b/openid/store/filestore.py
@@ -214,7 +214,8 @@ class FileOpenIDStore(OpenIDStore):
file_obj, tmp = self._mktemp()
try:
file_obj.write(auth_key)
- os.fsync(file_obj.fileno())
+ # Must close the file before linking or renaming it on win32.
+ file_obj.close()
try:
if hasattr(os, 'link'):
|
[project @ store.filestore.createAuthKey: close before rename.] Fixes a "[Errno <I>] Permission denied" in python-win.
|
py
|
diff --git a/examples/google_spreadsheet.py b/examples/google_spreadsheet.py
index <HASH>..<HASH> 100755
--- a/examples/google_spreadsheet.py
+++ b/examples/google_spreadsheet.py
@@ -40,7 +40,7 @@ import datetime
import Adafruit_DHT
import gspread
-from oauth2client.client import SignedJwtAssertionCredentials
+from oauth2client.service_account import ServiceAccountCredentials
# Type of sensor, can be Adafruit_DHT.DHT11, Adafruit_DHT.DHT22, or Adafruit_DHT.AM2302.
DHT_TYPE = Adafruit_DHT.DHT22
@@ -81,10 +81,8 @@ FREQUENCY_SECONDS = 30
def login_open_sheet(oauth_key_file, spreadsheet):
"""Connect to Google Docs spreadsheet and return the first worksheet."""
try:
- json_key = json.load(open(oauth_key_file))
- credentials = SignedJwtAssertionCredentials(json_key['client_email'],
- json_key['private_key'],
- ['https://spreadsheets.google.com/feeds'])
+ scope = ['https://spreadsheets.google.com/feeds']
+ credentials = ServiceAccountCredentials.from_json_keyfile_name(oauth_key_file, scope)
gc = gspread.authorize(credentials)
worksheet = gc.open(spreadsheet).sheet1
return worksheet
|
Fix deprecated SignedJwtAssertionCredentials
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,15 +2,15 @@ from setuptools import setup
with open('requirements.txt', 'rb') as f:
- requirements = [i.strip().replace('==', '>=') for i in f]
+ requirements = [i.strip() for i in f]
setup(name='nameko',
version='0.1-dev',
description='service framework supporting multiple'
'messaging and RPC implementations',
- author='',
- author_email='',
+ author='onefinestay',
+ author_email='[email protected]',
packages=['nameko', ],
install_requires=requirements,
test_requires=['pytest>=2.2.4', 'mock>=1.0b1', ],
|
making setup.py treat requirements.txt without magic
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,7 +16,7 @@ if version_info < (3, 5):
setup(
name='wikitextparser',
# Scheme: [N!]N(.N)*[{a|b|rc}N][.postN][.devN]
- version='0.13.0.dev1',
+ version='0.13.1.dev1',
description='A simple, purely python, WikiText parsing tool.',
long_description=open(path.join(here, 'README.rst')).read(),
url='https://github.com/5j9/wikitextparser',
|
version='<I>.dev1'. I had set the wrong version number in the previous commit.
|
py
|
diff --git a/simpleai/search/viewers.py b/simpleai/search/viewers.py
index <HASH>..<HASH> 100644
--- a/simpleai/search/viewers.py
+++ b/simpleai/search/viewers.py
@@ -55,6 +55,11 @@ class WebViewer(ConsoleViewer):
self.port = port
self.paused = True
self.events = []
+ self.current_fringe = []
+ self.last_chosen = None
+ self.last_is_goal = False
+ self.last_expanded = None
+ self.last_successors = []
web_template_path = path.join(path.dirname(__file__), 'web_viewer.html')
self.web_template = open(web_template_path).read()
@@ -90,3 +95,15 @@ class WebViewer(ConsoleViewer):
def output(self, *args):
self.events.append(' '.join(map(str, args)))
+
+ def new_iteration(self, fringe):
+ self.current_fringe = fringe
+ super(WebViewer, self).new_iteration(fringe)
+
+ def chosen_node(self, node, is_goal):
+ self.last_chosen, self.last_is_goal = node, is_goal
+ super(WebViewer, self).chosen_node(node, is_goal)
+
+ def expanded(self, node, successors):
+ self.last_expanded, self.last_successors = node, successors
+ super(WebViewer, self).expanded(node, successors)
|
Storing last events information, will need it to build the graph
|
py
|
diff --git a/blockstack_cli_0.14.1/blockstack_client/config.py b/blockstack_cli_0.14.1/blockstack_client/config.py
index <HASH>..<HASH> 100644
--- a/blockstack_cli_0.14.1/blockstack_client/config.py
+++ b/blockstack_cli_0.14.1/blockstack_client/config.py
@@ -87,6 +87,11 @@ if os.environ.get("BLOCKSTACK_TEST", None) is not None and os.environ.get("BLOCK
FIRST_BLOCK_TIME_UTC = 1441737751
+TX_MIN_CONFIRMATIONS = 6
+if os.environ.get("BLOCKSTACK_TEST", None) is not None:
+ # test environment
+ TX_MIN_CONFIRMATIONS = 0
+
# borrowed from Blockstack
# Opcodes
ANNOUNCE = '#'
@@ -294,7 +299,6 @@ USER_ZONEFILE_TTL = 3600 # cache lifetime for a user's zonefile
SLEEP_INTERVAL = 20 # in seconds
TX_EXPIRED_INTERVAL = 10 # if a tx is not picked up by x blocks
-
PREORDER_CONFIRMATIONS = 6
PREORDER_MAX_CONFIRMATIONS = 130 # no. of blocks after which preorder should be removed
TX_CONFIRMATIONS_NEEDED = 10
|
TX_MIN_CONFIRMATIONS is 6 in production, but 0 in testing (since we use bitcoind -regtest)
|
py
|
diff --git a/vsgen/__main__.py b/vsgen/__main__.py
index <HASH>..<HASH> 100644
--- a/vsgen/__main__.py
+++ b/vsgen/__main__.py
@@ -31,7 +31,7 @@ def main(argv=None):
pylogger = VSGLogger()
# Construct a command line parser and parse the command line
- args = VSGSuite.make_parser(description='Executes the VSG package as an application.').parse_args(argv[1:])
+ args = VSGSuite.make_parser(description='Executes the vsgen package as an application.').parse_args(argv[1:])
for s in VSGSuite.from_args(**vars(args)):
s.write(False)
return 0
|
Corrected package name in argparse construction.
|
py
|
diff --git a/bulbs/__init__.py b/bulbs/__init__.py
index <HASH>..<HASH> 100644
--- a/bulbs/__init__.py
+++ b/bulbs/__init__.py
@@ -1 +1 @@
-__version__ = "0.3.11"
+__version__ = "0.3.12"
|
upped version number for pypi
|
py
|
diff --git a/grab/spider.py b/grab/spider.py
index <HASH>..<HASH> 100644
--- a/grab/spider.py
+++ b/grab/spider.py
@@ -240,7 +240,7 @@ class Spider(object):
Stop the task which was executed too many times.
"""
- if task.task_try_count >= self.task_try_limit:
+ if task.task_try_count > self.task_try_limit:
logging.debug('Task tries ended: %s / %s' % (task.name, task.url))
return False
elif task.network_try_count >= self.network_try_limit:
|
Fix bug in task number processing in spider
|
py
|
diff --git a/tests/unit/pyobjects_test.py b/tests/unit/pyobjects_test.py
index <HASH>..<HASH> 100644
--- a/tests/unit/pyobjects_test.py
+++ b/tests/unit/pyobjects_test.py
@@ -82,6 +82,18 @@ import salt://map.sls
Pkg.removed("samba-imported", names=[Samba.server, Samba.client])
'''
+recursive_map_template = '''#!pyobjects
+from salt://map.sls import Samba
+
+class CustomSamba(Samba):
+ pass
+'''
+
+recursive_import_template = '''#!pyobjects
+from salt://recursive_map.sls import CustomSamba
+
+Pkg.removed("samba-imported", names=[CustomSamba.server, CustomSamba.client])'''
+
from_import_template = '''#!pyobjects
# this spacing is like this on purpose to ensure it's stripped properly
from salt://map.sls import Samba
@@ -316,6 +328,9 @@ class RendererTests(RendererMixin, StateTests):
render_and_assert(import_template)
render_and_assert(from_import_template)
+ self.write_template_file("recursive_map.sls", recursive_map_template)
+ render_and_assert(recursive_import_template)
+
def test_random_password(self):
'''Test for https://github.com/saltstack/salt/issues/21796'''
ret = self.render(random_password_template)
|
Add test to prove that recursive imports are currently broken. Refs #<I>. This also exposes some other issues with the Registry being in an inconsistent state when an exception occurs while we are rendering.
|
py
|
diff --git a/pysatMagVect/__init__.py b/pysatMagVect/__init__.py
index <HASH>..<HASH> 100644
--- a/pysatMagVect/__init__.py
+++ b/pysatMagVect/__init__.py
@@ -8,12 +8,12 @@ del here
on_rtd = os.environ.get('ONREADTHEDOCS') == 'True'
if not on_rtd:
- from . import igrf
+ from pysatMagVect import igrf
else:
igrf = None
-from . import _core
-from ._core import *
-from . import satellite
+from pysatMagVect import _core
+from pysatMagVect._core import *
+from pysatMagVect import satellite
__all__ = []
|
STY: Removed relative imports
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ from setuptools import setup
setup(
name='raincloudy',
packages=['raincloudy'],
- version='0.0.2',
+ version='0.0.3',
description='A Python library to communicate with Melnor' +
' RainCloud Smart Garden Watering Irrigation Timer' +
' (https://wwww.melnor.com/)',
|
Bump dev version <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -5,6 +5,7 @@ import re
import shutil
import sys
from setuptools import setup, find_packages
+from pkg_resources import get_distribution, DistributionNotFound
def read(*names, **kwargs):
@@ -15,6 +16,13 @@ def read(*names, **kwargs):
return fp.read()
+def get_dist(pkgname):
+ try:
+ return get_distribution(pkgname)
+ except DistributionNotFound:
+ return None
+
+
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
@@ -30,12 +38,15 @@ VERSION = find_version('torchvision', '__init__.py')
requirements = [
'numpy',
- 'pillow >= 4.1.1',
'six',
'torch',
'tqdm'
]
+pillow_ver = ' >= 4.1.1'
+pillow_req = 'pillow-simd' if get_dist('pillow-simd') is not None else 'pillow'
+requirements.append(pillow_req + pillow_ver)
+
setup(
# Metadata
name='torchvision',
|
make vision depend on pillow-simd if already installed (#<I>) * make vision depend on pillow-simd if already installed * actually make pillow-simd optional
|
py
|
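The torchvision diff above prefers an already-installed drop-in replacement over the default package at setup time. The detection trick in isolation (pkg_resources ships with setuptools):

```python
from pkg_resources import get_distribution, DistributionNotFound

def get_dist(pkgname):
    try:
        return get_distribution(pkgname)
    except DistributionNotFound:
        return None

pillow_req = 'pillow-simd' if get_dist('pillow-simd') is not None else 'pillow'
requirements = ['numpy', pillow_req + ' >= 4.1.1']
print(requirements)  # 'pillow' unless pillow-simd is already installed
```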
diff --git a/test_pem.py b/test_pem.py
index <HASH>..<HASH> 100644
--- a/test_pem.py
+++ b/test_pem.py
@@ -84,6 +84,17 @@ class TestCertificateOptionsFromFiles(object):
)
assert 2 == len(ctxFactory.extraCertChain)
+ def test_worksWithChainCertsFirst(self, tmpdir):
+ pytest.importorskip('twisted')
+ keyFile = tmpdir.join('key.pem')
+ keyFile.write(KEY_PEM)
+ certFile = tmpdir.join('cert_and_chain.pem')
+ certFile.write(''.join(reversed(CERT_PEMS)))
+ ctxFactory = pem.certificateOptionsFromFiles(
+ str(keyFile), str(certFile)
+ )
+ assert 2 == len(ctxFactory.extraCertChain)
+
def test_worksWithEverythingInOneFile(self, allFile):
pytest.importorskip('twisted')
ctxFactory = pem.certificateOptionsFromFiles(str(allFile))
|
A test case for allowing chain certificates to come first.
|
py
|
diff --git a/emma2/msm/analysis/sparse/assessment.py b/emma2/msm/analysis/sparse/assessment.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/analysis/sparse/assessment.py
+++ b/emma2/msm/analysis/sparse/assessment.py
@@ -106,7 +106,7 @@ def is_reversible(T, mu=None, tol=1e-15):
T = T.tocsr()
if mu is None:
- from decomposition import stationary_distribution_from_eigenvector as statdist
+ from decomposition import stationary_distribution_from_backward_iteration as statdist
mu = statdist(T)
Mu = diags(mu, 0)
|
[msm/analysis] Use backward iteration in is_reversible method to compute stationary distribution
|
py
|
diff --git a/tests/functional_tests/test_plot.py b/tests/functional_tests/test_plot.py
index <HASH>..<HASH> 100644
--- a/tests/functional_tests/test_plot.py
+++ b/tests/functional_tests/test_plot.py
@@ -11,7 +11,7 @@ def test_plot():
ds.compute("adev")
p = at.Plot(no_display=True)
p.plot(ds, errorbars=True)
- p.show()
+ # p.show() # can't show() in test
if __name__ == "__main__":
test_plot()
|
can't show() from automated test
|
py
|
diff --git a/mplotqueries/mplotqueries.py b/mplotqueries/mplotqueries.py
index <HASH>..<HASH> 100755
--- a/mplotqueries/mplotqueries.py
+++ b/mplotqueries/mplotqueries.py
@@ -82,8 +82,8 @@ class MongoPlotQueries(object):
parser.add_argument('--type', action='store', default='duration', choices=self.plot_types.keys(), help='type of plot (default=duration)')
mutex = parser.add_mutually_exclusive_group()
- mutex.add_argument('--group')
- mutex.add_argument('--label')
+ mutex.add_argument('--group', help="specify value to group on. Possible values depend on type of plot. All basic plot types can group on 'namespace', 'operation', 'thread', range plots can additionally group on 'log2code'.")
+ mutex.add_argument('--label', help="instead of specifying a group, a label can be specified. Grouping is then disabled, and the single group for all data points is named LABEL.")
# separate parser for --plot arguments (multiple times possible)
self.args = vars(parser.parse_args())
|
added some help text to group/label.
|
py
|
diff --git a/percy/environment.py b/percy/environment.py
index <HASH>..<HASH> 100644
--- a/percy/environment.py
+++ b/percy/environment.py
@@ -58,7 +58,7 @@ class Environment(object):
def _get_origin_url(self):
process = subprocess.Popen(
['git', 'config', '--get', 'remote.origin.url'], stdout=subprocess.PIPE)
- return str(process.stdout.read().strip())
+ return unicode(process.stdout.read().strip())
@property
def repo(self):
|
Attempt to fix python 3 encoding issues.
|
py
|
diff --git a/python/phonenumbers/__init__.py b/python/phonenumbers/__init__.py
index <HASH>..<HASH> 100644
--- a/python/phonenumbers/__init__.py
+++ b/python/phonenumbers/__init__.py
@@ -146,7 +146,7 @@ from .phonenumbermatcher import PhoneNumberMatch, PhoneNumberMatcher, Leniency
# Version number is taken from the upstream libphonenumber version
# together with an indication of the version of the Python-specific code.
-__version__ = "8.12.29"
+__version__ = "8.12.30"
__all__ = ['PhoneNumber', 'CountryCodeSource', 'FrozenPhoneNumber',
'REGION_CODE_FOR_NON_GEO_ENTITY', 'NumberFormat', 'PhoneNumberDesc', 'PhoneMetadata',
|
Prep for <I> release
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,12 +18,12 @@ setup(
'efopen'
],
install_requires=[
- 'boto3==1.16.39',
- 'click==7.1.2',
+ 'boto3<=1.16.39',
+ 'click<=7.1.2',
'PyYAML<=5.3.1',
- 'cfn-lint==0.44.0',
- 'requests==2.25.1',
- 'yamllint==1.25.0'
+ 'cfn-lint<=0.44.0',
+ 'requests<=2.25.1',
+ 'yamllint<=1.25.0'
],
extras_require={
'test': [
|
Update setup.py Loosen all package version requirements
|
py
|
diff --git a/tcod/console.py b/tcod/console.py
index <HASH>..<HASH> 100644
--- a/tcod/console.py
+++ b/tcod/console.py
@@ -1246,6 +1246,7 @@ def load_xp(
.. versionadded:: 12.4
Example::
+
import tcod
import numpy as np
@@ -1298,6 +1299,7 @@ def save_xp(
.. versionadded:: 12.4
Example::
+
import tcod
import numpy as np
|
Fix example blocks formatting. I always forget to add the extra blank line here.
|
py
|
diff --git a/tools/pyboard.py b/tools/pyboard.py
index <HASH>..<HASH> 100644
--- a/tools/pyboard.py
+++ b/tools/pyboard.py
@@ -89,8 +89,8 @@ class Pyboard:
return self.exec(pyfile)
def get_time(self):
- t = str(self.exec('pyb.time()'), encoding='ascii').strip().split()[1].split(':')
- return int(t[0]) * 3600 + int(t[1]) * 60 + int(t[2])
+ t = str(self.eval('pyb.RTC().datetime()'), encoding='ascii')[1:-1].split(', ')
+ return int(t[4]) * 3600 + int(t[5]) * 60 + int(t[6])
def execfile(filename, device='/dev/ttyACM0'):
pyb = Pyboard(device)
|
tools: Update pyboard.py to work with latest changes to RTC.
|
py
|
diff --git a/oauth_django_example/twitter/views.py b/oauth_django_example/twitter/views.py
index <HASH>..<HASH> 100644
--- a/oauth_django_example/twitter/views.py
+++ b/oauth_django_example/twitter/views.py
@@ -7,8 +7,8 @@ from django.contrib.auth.decorators import login_required
from twython import Twython
from twitter.models import Profile
-CONSUMER_KEY = "piKE9TwKoAhJoj7KEMlwGQ"
-CONSUMER_SECRET = "RA9IzvvzoLAFGOOoOndm1Cvyh94pwPWLy4Grl4dt0o"
+CONSUMER_KEY = "YOUR CONSUMER KEY HERE"
+CONSUMER_SECRET = "YOUR CONSUMER SECRET HERE"
def twitter_logout(request):
logout(request)
|
Oh yeah, forgot to strip these...
|
py
|
diff --git a/django_uwsgi/urls.py b/django_uwsgi/urls.py
index <HASH>..<HASH> 100755
--- a/django_uwsgi/urls.py
+++ b/django_uwsgi/urls.py
@@ -1,8 +1,8 @@
from django.conf.urls import patterns, url
from . import views
-urlpatterns = patterns('',
+urlpatterns = [
url(r'^$', views.UwsgiStatus.as_view(), name='uwsgi_index'),
url(r'^reload/$', views.UwsgiReload.as_view(), name='uwsgi_reload'),
url(r'^clear_cache/$', views.UwsgiCacheClear.as_view(), name='uwsgi_cache_clear'),
-)
+]
|
Resolve compatibility issue: django.conf.urls.patterns() is deprecated and will be removed in Django <I>
|
py
|
diff --git a/edisgo/tools/pypsa_io.py b/edisgo/tools/pypsa_io.py
index <HASH>..<HASH> 100644
--- a/edisgo/tools/pypsa_io.py
+++ b/edisgo/tools/pypsa_io.py
@@ -295,7 +295,7 @@ def mv_to_pypsa(network):
generator['name'].append(repr(gen))
generator['bus'].append(bus_name)
generator['control'].append('PQ')
- generator['p_nom'].append(gen.nominal_capacity / 1e3)
+ generator['p_nom'].append(gen.nominal_power / 1e3)
generator['type'].append('_'.join([gen.type, gen.subtype]))
bus['name'].append(bus_name)
@@ -545,7 +545,7 @@ def lv_to_pypsa(network):
storage['name'].append(repr(sto))
storage['bus'].append(bus_name)
- storage['p_nom'].append(sto.nominal_capacity)
+ storage['p_nom'].append(sto.nominal_power)
storage['state_of_charge_initial'].append(sto.soc_inital)
storage['efficiency_store'].append(sto.efficiency_in)
storage['efficiency_dispatch'].append(sto.efficiency_out)
|
Correct p_nom in pypsa representation of storages
|
py
|
diff --git a/tcex/testing/validate_data.py b/tcex/testing/validate_data.py
index <HASH>..<HASH> 100644
--- a/tcex/testing/validate_data.py
+++ b/tcex/testing/validate_data.py
@@ -853,7 +853,7 @@ class ThreatConnect(object):
unique_id=entity.get('id'),
)
elif tc_entity.get('type') in self.provider.tcex.indicator_types:
- tc_entity['summary'] = unquote(tc_entity.get('summary'))
+ tc_entity['summary'] = tc_entity.get('summary')
if tc_entity.get('type').lower() == 'file':
tc_entity['summary'] = tc_entity.get('summary').upper()
ti_entity = self.provider.tcex.ti.indicator(
|
changing how urls are encoded during testing
|
py
|
diff --git a/src/livestreamer_cli/argparser.py b/src/livestreamer_cli/argparser.py
index <HASH>..<HASH> 100644
--- a/src/livestreamer_cli/argparser.py
+++ b/src/livestreamer_cli/argparser.py
@@ -353,11 +353,6 @@ player.add_argument(
is capable of reconnecting to a HTTP stream. This is usually
done by setting your player to a "repeat mode".
- Note: Some stream types may end up looping the last part of a
- stream once or twice when it ends. This is caused by a lack of
- shared state between attempts to use a stream and may be fixed in
- the future.
-
"""
)
player.add_argument(
|
docs: Remove no longer needed note.
|
py
|
diff --git a/edtf_validate/valid_edtf.py b/edtf_validate/valid_edtf.py
index <HASH>..<HASH> 100644
--- a/edtf_validate/valid_edtf.py
+++ b/edtf_validate/valid_edtf.py
@@ -137,7 +137,7 @@ yearWithU = (
)
monthWithU = "u" + digitOrU | "0u" | "1u"
oneThru3 = oneOf("1 2 3")
-dayWithU = "u" + digitOrU | oneThru3 + "u"
+dayWithU = "u" + digitOrU | oneThru3 + "u" | "0u"
monthDayWithU = (
monthWithU + "-" + dayWithU |
month + "-" + dayWithU |
|
dayWithU could start with 0
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -39,7 +39,6 @@ setup(
'urbansim = urbansim.cli:main'
],
'urbansim.commands': [
- 'compile = urbansim.urbansim.compilecli:Compile',
'serve = urbansim.server.servecli:Serve'
]
}
|
removing compile command from the setup
|
py
|
diff --git a/tests/beacon/test_helpers.py b/tests/beacon/test_helpers.py
index <HASH>..<HASH> 100644
--- a/tests/beacon/test_helpers.py
+++ b/tests/beacon/test_helpers.py
@@ -62,10 +62,6 @@ from tests.beacon.helpers import (
)
-class UnreachableCodePathError(Exception):
- pass
-
-
@pytest.fixture()
def sample_block(sample_beacon_block_params):
return SerenityBeaconBlock(**sample_beacon_block_params)
@@ -861,8 +857,7 @@ def _corrupt_vote_count(params):
new_vote_count,
)
else:
- msg = "list of ``custody_bit_0_indices`` should not exhaust ``itertools.count``"
- raise UnreachableCodePathError(msg)
+ raise Exception("Unreachable code path")
def _create_slashable_vote_data_messages(params):
|
Remove specialized exception in favor of built-in type
|
py
|
diff --git a/pdfwatermarker/utils/path.py b/pdfwatermarker/utils/path.py
index <HASH>..<HASH> 100644
--- a/pdfwatermarker/utils/path.py
+++ b/pdfwatermarker/utils/path.py
@@ -24,7 +24,20 @@ def set_destination(source, suffix):
# Concatenate new filename
dst_path = src_file_name + '_' + suffix + src_file_ext
- return os.path.join(directory, dst_path) # new full path
+ full_path = os.path.join(directory, dst_path) # new full path
+
+ if not os.path.exists(full_path):
+ return full_path
+ else:
+        # If the file exists, append an incrementing number until the
+        # filename is unique. Check the full path, not the bare filename,
+        # or files outside the current working directory are missed.
+        number = 1
+        while True:
+            dst_path = src_file_name + '_' + suffix + '_' + str(number) + src_file_ext
+            full_path = os.path.join(directory, dst_path)  # new full path
+            if not os.path.exists(full_path):
+                break
+            number += 1
+        return full_path
def resource_path(relative):
|
Added an existence check with an incrementing counter so destination filenames stay unique.
|
py
|
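An equivalent, more compact way to express the loop is to drive the counter with `itertools.count`; a sketch under the same assumptions (the helper name is ours):

import itertools
import os

def unique_destination(directory, stem, suffix, ext):
    # First free path of the form stem_suffix[_N]ext, checked as a full path.
    candidate = os.path.join(directory, stem + '_' + suffix + ext)
    for n in itertools.count(1):
        if not os.path.exists(candidate):
            return candidate
        candidate = os.path.join(directory, stem + '_' + suffix + '_' + str(n) + ext)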
diff --git a/meshio/vtk_io.py b/meshio/vtk_io.py
index <HASH>..<HASH> 100644
--- a/meshio/vtk_io.py
+++ b/meshio/vtk_io.py
@@ -526,7 +526,7 @@ def _write_cells(f, cells, write_binary):
if write_binary:
for key in cells:
if key[:7] == "polygon":
- d = numpy.full(len(cells[key]), meshio_to_vtk_type["polygon"]).astype(
+ d = numpy.full(len(cells[key]), meshio_to_vtk_type[key[:7]]).astype(
numpy.dtype(">i4")
)
else:
@@ -540,9 +540,7 @@ def _write_cells(f, cells, write_binary):
for key in cells:
if key[:7] == "polygon":
for _ in range(len(cells[key])):
- f.write(
- "{}\n".format(meshio_to_vtk_type["polygon"]).encode("utf-8")
- )
+ f.write("{}\n".format(meshio_to_vtk_type[key[:7]]).encode("utf-8"))
else:
for _ in range(len(cells[key])):
f.write("{}\n".format(meshio_to_vtk_type[key]).encode("utf-8"))
|
use same coding style as in tests
|
py
|
diff --git a/sos/plugins/openswan.py b/sos/plugins/openswan.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/openswan.py
+++ b/sos/plugins/openswan.py
@@ -36,6 +36,11 @@ class Openswan(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
if self.get_option("ipsec-barf"):
self.add_cmd_output("ipsec barf")
- self.add_forbidden_path("/etc/ipsec.d/key[3-4].db")
+ self.add_forbidden_path([
+ '/etc/ipsec.secrets',
+ '/etc/ipsec.secrets.d/*',
+ '/etc/ipsec.d/*.db',
+ '/etc/ipsec.d/*.secrets'
+ ])
# vim: set et ts=4 sw=4 :
|
[openswan] Forbid collection of secrets and certdb When collecting IPSec data, currently nothing prevents the collection of keys or the cert.db files. Ensure we don't collect this information as it is private.
|
py
|
diff --git a/bcbio/structural/seq2c.py b/bcbio/structural/seq2c.py
index <HASH>..<HASH> 100644
--- a/bcbio/structural/seq2c.py
+++ b/bcbio/structural/seq2c.py
@@ -277,7 +277,8 @@ def _combine_coverages(items, work_dir):
with open(tx_out_file, 'w') as out_f:
for data in items:
svouts = [x for x in data["sv"] if x["variantcaller"] == "seq2c"]
- assert len(svouts) == 1
+ cfiles = list(set([os.path.basename(x["coverage"]) for x in svouts]))
+ assert len(cfiles) == 1, (dd.get_sample_name(data), cfiles)
cov_file = svouts[0]["coverage"]
with open(cov_file) as cov_f:
out_f.write(cov_f.read())
|
seq2c: improve combined sanity check to handle shared normals These will have multiple identical coverage files due to the batching.
|
py
|
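The relaxed assertion works because deduplicating basenames through a set collapses the identical coverage files produced by shared normals. In miniature:

import os

svouts = [{"coverage": "/work/batch1/normal.cov"},
          {"coverage": "/work/batch2/normal.cov"}]  # same file, two batches
cfiles = list(set(os.path.basename(x["coverage"]) for x in svouts))
assert len(cfiles) == 1, cfiles  # passes: both basenames are 'normal.cov'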
diff --git a/ignite/engine/engine.py b/ignite/engine/engine.py
index <HASH>..<HASH> 100644
--- a/ignite/engine/engine.py
+++ b/ignite/engine/engine.py
@@ -611,9 +611,9 @@ class Engine(Serializable):
Engine has a state and the following logic is applied in this function:
- At the first call, new state is defined by `max_epochs`, `max_iters`, `epoch_length`, if provided.
- A timer for total and per-epoch time is initialized when Events.STARTED is handled.
+ A timer for total and per-epoch time is initialized when Events.STARTED is handled.
- If state is already defined such that there are iterations to run until `max_epochs` and no input arguments
- provided, state is kept and used in the function.
+ provided, state is kept and used in the function.
- If state is defined and engine is "done" (no iterations to run until `max_epochs`), a new state is defined.
- If state is defined, engine is NOT "done", then input arguments if provided override defined state.
|
[docs] fix engine.run docstring (#<I>) * [docs] engine.run docstring * Update setup.cfg
|
py
|
diff --git a/openquake/hazardlib/calc/filters.py b/openquake/hazardlib/calc/filters.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/calc/filters.py
+++ b/openquake/hazardlib/calc/filters.py
@@ -358,6 +358,8 @@ class RtreeFilter(SourceFilter):
Integration distance dictionary (TRT -> distance in km)
"""
def __init__(self, sitecol, integration_distance):
+ if rtree is None:
+ raise ImportError('rtree')
self.integration_distance = integration_distance
self.distribute = 'processpool'
self.indexpath = gettemp()
|
Cleanup [skip CI]
|
py
|
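The pattern here is the guarded optional dependency: import the package at module scope if available, and raise a clear ImportError only when the feature that needs it is constructed. A generic sketch of the idiom (not openquake's exact code):

try:
    import rtree  # optional dependency
except ImportError:
    rtree = None

class RtreeFilter:
    def __init__(self, sitecol, integration_distance):
        if rtree is None:
            # Fail at construction time, not at import time, so the rest
            # of the package stays usable without the optional package.
            raise ImportError('rtree is required for RtreeFilter')
        self.sitecol = sitecol
        self.integration_distance = integration_distance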
diff --git a/troposphere/apigateway.py b/troposphere/apigateway.py
index <HASH>..<HASH> 100644
--- a/troposphere/apigateway.py
+++ b/troposphere/apigateway.py
@@ -210,7 +210,7 @@ class RestApi(AWSObject):
resource_type = "AWS::ApiGateway::RestApi"
props = {
- "Body": (basestring, False),
+ "Body": (dict, False),
"BodyS3Location": (S3Location, False),
"CloneFrom": (basestring, False),
"Description": (basestring, False),
|
Change param type for resource: RestAPI (#<I>) "Body" is a dict now: <URL>
|
py
|
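With the corrected type, an inline Swagger/OpenAPI definition is passed as a plain dict. A minimal sketch, assuming a current troposphere install:

from troposphere import Template
from troposphere.apigateway import RestApi

t = Template()
t.add_resource(RestApi(
    "MyApi",
    Name="my-api",
    Body={  # inline Swagger document as a dict, not a string
        "swagger": "2.0",
        "info": {"title": "my-api", "version": "1.0"},
        "paths": {},
    },
))
print(t.to_json())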
diff --git a/niworkflows/__about__.py b/niworkflows/__about__.py
index <HASH>..<HASH> 100644
--- a/niworkflows/__about__.py
+++ b/niworkflows/__about__.py
@@ -10,7 +10,7 @@ as well as for open-source software distribution.
from __future__ import absolute_import, division, print_function
import datetime
-__version__ = '0.5.1-1'
+__version__ = '0.5.2dev'
__packagename__ = 'niworkflows'
__author__ = 'The CRN developers'
__copyright__ = 'Copyright {}, Center for Reproducible Neuroscience, Stanford University'.format(
|
[skip ci] Update version after release
|
py
|
diff --git a/tacl/constants.py b/tacl/constants.py
index <HASH>..<HASH> 100644
--- a/tacl/constants.py
+++ b/tacl/constants.py
@@ -197,6 +197,9 @@ REPORT_EPILOG = '''\
on a set of results more than once will make the results
inaccurate!
+ --extend applies before --reduce because it may generate results
+ that are also amenable to reduction.
+
Since this command always outputs a valid results file, its output
can be used as input for a subsequent tacl report command. To
chain commands together without creating an intermediate file,
|
Added note about why --extend is run before --reduce when both options are specified.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,15 @@
import os
from setuptools import setup, find_packages
+import polls
+
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-polls',
- version='0.0.1',
+ version=polls.__version__,
description='A simple polls app for django',
long_description=read('README.md'),
license=read('LICENSE'),
|
use version from polls module for setup
|
py
|
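One caveat with `import polls` inside setup.py: it breaks if the package has unmet import-time dependencies. A common alternative (sketch; the helper name is ours) reads the version string without importing the package:

import os
import re

def read_version(package_dir):
    # Extract __version__ from a package's __init__.py without importing it.
    init_path = os.path.join(package_dir, '__init__.py')
    with open(init_path) as f:
        match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", f.read())
    if not match:
        raise RuntimeError('Unable to find __version__ in ' + init_path)
    return match.group(1)

# usage in setup(): version=read_version('polls')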
diff --git a/python/examples/logistic_regression.py b/python/examples/logistic_regression.py
index <HASH>..<HASH> 100755
--- a/python/examples/logistic_regression.py
+++ b/python/examples/logistic_regression.py
@@ -39,7 +39,7 @@ def readPointBatch(iterator):
strs = list(iterator)
matrix = np.zeros((len(strs), D + 1))
for i in xrange(len(strs)):
- matrix[i] = np.fromstring(strs[i], dtype=np.float32, sep = ' ')
+ matrix[i] = np.fromstring(strs[i].replace(',', ' '), dtype=np.float32, sep=' ')
return [matrix]
if __name__ == "__main__":
|
Fix string parsing and style in LR
|
py
|
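Worth noting when adapting this: text-mode `numpy.fromstring` has since been deprecated. The same mixed comma/space parsing, without it:

import numpy as np

line = "1.0, 2.5 3.75,4.0"
values = np.array([float(tok) for tok in line.replace(',', ' ').split()],
                  dtype=np.float32)
print(values)  # four float32 values: 1.0 2.5 3.75 4.0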
diff --git a/openquake/calculators/tests/event_based_risk_test.py b/openquake/calculators/tests/event_based_risk_test.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/event_based_risk_test.py
+++ b/openquake/calculators/tests/event_based_risk_test.py
@@ -163,7 +163,8 @@ class EventBasedRiskTestCase(CalculatorTestCase):
# Turkey with SHARE logic tree
self.run_calc(case_4.__file__, 'job.ini')
[fname] = export(('avg_losses-stats', 'csv'), self.calc.datastore)
- self.assertEqualFiles('expected/avg_losses-mean.csv', fname)
+ self.assertEqualFiles('expected/avg_losses-mean.csv',
+ fname, delta=1E-5)
fnames = export(('agg_loss_table', 'csv'), self.calc.datastore)
assert fnames, 'No agg_losses exported??'
|
Added a delta [skip hazardlib]
|
py
|
diff --git a/openquake/engine/db/models.py b/openquake/engine/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/db/models.py
+++ b/openquake/engine/db/models.py
@@ -2130,7 +2130,7 @@ class Gmf(djm.Model):
'where parent_id=%s', (self.id,))
return [self.__class__.objects.get(pk=r[0]) for r in curs]
- def get_gmfs_per_ses(self, orderby=False):
+ def get_gmfs_per_ses(self, orderby=True):
"""
Get the ground motion fields per SES in a good format for
the XML export.
@@ -2169,7 +2169,7 @@ class Gmf(djm.Model):
GROUP BY imt, sa_period, sa_damping, tag
""" % (hc.id, self.id, ses.id)
if orderby: # may be used in tests to get reproducible results
- query += 'order by imt, sa_period, sa_damping, rupture_id;'
+ query += 'order by imt, sa_period, sa_damping, tag;'
with transaction.commit_on_success(using='job_init'):
curs = getcursor('job_init')
curs.execute(query)
|
The GMFs must be ordered by default (the ordering is fast anyway)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -11,7 +11,8 @@ with open(join(dirname(__file__), 'bddrest', '__init__.py')) as v_file:
dependencies = [
'pyyaml',
- 'argcomplete'
+ 'argcomplete',
+ 'pytest >= 4.0.2',
]
|
Migrating to pytest >= <I>
|
py
|
diff --git a/api/src/opentrons/hardware_control/backends/ot3simulator.py b/api/src/opentrons/hardware_control/backends/ot3simulator.py
index <HASH>..<HASH> 100644
--- a/api/src/opentrons/hardware_control/backends/ot3simulator.py
+++ b/api/src/opentrons/hardware_control/backends/ot3simulator.py
@@ -58,7 +58,6 @@ from opentrons.hardware_control.dev_types import (
AttachedGripper,
OT3AttachedInstruments,
)
-from opentrons_hardware.drivers.gpio import OT3GPIO
log = logging.getLogger(__name__)
@@ -115,7 +114,6 @@ class OT3Simulator:
"""
self._configuration = config
self._loop = loop
- self._gpio_dev = OT3GPIO()
self._strict_attached = bool(strict_attached_instruments)
self._stubbed_attached_modules = attached_modules
@@ -157,11 +155,6 @@ class OT3Simulator:
self._current_settings: Optional[OT3AxisMap[CurrentConfig]] = None
@property
- def gpio_chardev(self) -> OT3GPIO:
- """Get the GPIO device."""
- return self._gpio_dev
-
- @property
def board_revision(self) -> BoardRevision:
"""Get the board revision"""
return BoardRevision.UNKNOWN
|
fix(api): do not acquire gpio in ot3simulator (#<I>) A simulator should not try to acquire the hardware GPIO device, and attempting to do so crashed.
|
py
|
diff --git a/salt/modules/git.py b/salt/modules/git.py
index <HASH>..<HASH> 100644
--- a/salt/modules/git.py
+++ b/salt/modules/git.py
@@ -1375,10 +1375,8 @@ def fetch(cwd,
command.extend(
[x for x in _format_opts(opts) if x not in ('-f', '--force')]
)
- if not isinstance(remote, six.string_types):
- remote = str(remote)
if remote:
- command.append(remote)
+ command.append(str(remote))
if refspecs is not None:
if isinstance(refspecs, (list, tuple)):
refspec_list = []
|
Check that the remote exists before casting it to a string. The bug: if no origin was specified, None was cast to the string 'None', which is truthy, so the code ended up running `git fetch None`, which doesn't work :)
|
py
|
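The underlying pitfall generalizes: `str(None)` is the truthy string 'None', so casting before the truthiness check turns a missing value into a bogus argument. In miniature:

remote = None

cast = str(remote)  # 'None', a non-empty, truthy string
if cast:
    print(['git', 'fetch', cast])  # ['git', 'fetch', 'None']  (the old bug)

if remote:  # fixed order: test first, cast only when a value is present
    print(['git', 'fetch', str(remote)])  # never runs for None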