diff (string, 139-3.65k chars) | message (string, 8-627 chars) | diff_languages (1 value)
---|---|---|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ setup(
url='https://github.com/cea-cosmic/ModOpt',
download_url='https://github.com/cea-cosmic/ModOpt',
packages=find_packages(),
- install_requires=['numpy>=1.14.1', 'future>=0.16.0', 'scipy==1.2.1',
+ install_requires=['numpy==1.16.0', 'future>=0.16.0', 'scipy==1.2.1',
'progressbar2>=3.34.3'],
license='MIT',
description='Modular Optimisation tools for soliving inverse problems.',
|
Retaining Python <I> support
|
py
|
diff --git a/sphinx_markdown_builder/markdown_writer.py b/sphinx_markdown_builder/markdown_writer.py
index <HASH>..<HASH> 100644
--- a/sphinx_markdown_builder/markdown_writer.py
+++ b/sphinx_markdown_builder/markdown_writer.py
@@ -68,6 +68,9 @@ class MarkdownTranslator(Translator):
def visit_desc_name(self, node):
# name of the class/method
+ # Escape "__" which is a formating string for markdown
+ if node.rawsource.startswith("__"):
+ self.add('\\')
pass
def depart_desc_name(self, node):
|
Escaping '__' coming from Python methods. Python special methods like constructors start with '__', which is a bold marker in Markdown, so this tag is now escaped.
|
py
|
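A standalone sketch of the escaping rule this commit applies (a hypothetical helper, not the actual sphinx_markdown_builder API):

```python
def escape_leading_dunder(name):
    """Prefix a leading "__" with a backslash so Markdown
    does not render names like __init__ in bold."""
    if name.startswith("__"):
        return "\\" + name
    return name

assert escape_leading_dunder("__init__") == r"\__init__"
assert escape_leading_dunder("visit_desc_name") == "visit_desc_name"
```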
diff --git a/stegano/slsbset.py b/stegano/slsbset.py
index <HASH>..<HASH> 100644
--- a/stegano/slsbset.py
+++ b/stegano/slsbset.py
@@ -47,7 +47,7 @@ def hide(input_image_file, message, generator_function):
npixels = width * height
if len(message_bits) > npixels * 3:
- return """Too long message (%s > %s).""" % (len(message_bits), npixels * 3)
+ raise Exception("""The message you want to hide is too long (%s > %s).""" % (len(message_bits), npixels * 3))
generator = getattr(generators, generator_function)()
@@ -180,4 +180,4 @@ if __name__ == '__main__':
with open(options.secret_binary, "w") as f:
f.write(data)
else:
- print secret
\ No newline at end of file
+ print secret
|
Raise an exception if the message to hide is too long. closed #1
|
py
|
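The capacity rule behind the new exception, as a minimal sketch: an RGB cover image offers one least-significant bit per channel, so three bits per pixel (the dimensions below are hypothetical):

```python
def check_capacity(width, height, message_bits):
    """Raise if the message exceeds the 3-bits-per-pixel LSB capacity."""
    npixels = width * height
    if len(message_bits) > npixels * 3:
        raise Exception("The message you want to hide is too long (%s > %s)."
                        % (len(message_bits), npixels * 3))

check_capacity(100, 100, "0" * 30000)      # exactly at capacity: fine
try:
    check_capacity(100, 100, "0" * 30001)  # one bit over: raises
except Exception as exc:
    print(exc)
```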
diff --git a/splinter/driver/webdriver/chrome.py b/splinter/driver/webdriver/chrome.py
index <HASH>..<HASH> 100644
--- a/splinter/driver/webdriver/chrome.py
+++ b/splinter/driver/webdriver/chrome.py
@@ -23,9 +23,3 @@ class WebDriver(BaseWebDriver):
self._cookie_manager = ChromeCookieManager(self.driver)
super(WebDriver, self).__init__()
-
- def attach_file(self, name, value):
- """
- Chrome doesn't have support for file uploading.
- """
- raise NotImplementedError
|
Recent versions of the chrome driver support the attach_file feature
|
py
|
diff --git a/pyramid_webassets/__init__.py b/pyramid_webassets/__init__.py
index <HASH>..<HASH> 100644
--- a/pyramid_webassets/__init__.py
+++ b/pyramid_webassets/__init__.py
@@ -156,7 +156,6 @@ def includeme(config):
config.add_directive('add_webasset', add_webasset)
config.add_directive('get_webassets_env', get_webassets_env)
config.add_directive('add_webassets_setting', add_setting)
- config.add_static_view(assets_env.url, assets_env.directory)
config.set_request_property(get_webassets_env_from_request,
'webassets_env', reify=True)
config.set_request_property(assets, 'webassets', reify=True)
|
Remove automatic addition of static view, because it potentially leaks source files.
|
py
|
diff --git a/src/hamster/widgets/activityentry.py b/src/hamster/widgets/activityentry.py
index <HASH>..<HASH> 100644
--- a/src/hamster/widgets/activityentry.py
+++ b/src/hamster/widgets/activityentry.py
@@ -637,7 +637,7 @@ class CategoryEntry():
else:
# return whether the entered string is
# anywhere in the first column data
- return key.strip() in self.model.get_value(iter, 0)
+ return key.strip() in self.model.get_value(iter, 0).lower()
def on_action_activated(self, completion, index):
if index == self.unsorted_action_index:
|
Fix completion in categories TreeView. Because `key` is supplied in lowercase, matching against the original-case column data would fail.
|
py
|
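A minimal sketch of the matching logic being fixed: the completion machinery hands `key` over already lower-cased, so the stored column value must be lower-cased too before the containment test (sample data is hypothetical):

```python
def match_func(key, column_value):
    # key arrives lower-cased from the completion widget, so the
    # column data must be lower-cased as well for a fair comparison.
    return key.strip() in column_value.lower()

assert match_func("work", "Work/Projects")   # matches after .lower()
assert not match_func("work", "Leisure")
```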
diff --git a/graphql_ws/base.py b/graphql_ws/base.py
index <HASH>..<HASH> 100644
--- a/graphql_ws/base.py
+++ b/graphql_ws/base.py
@@ -105,8 +105,7 @@ class BaseSubscriptionServer(object):
return connection_context.close(1011)
def get_graphql_params(self, connection_context, payload):
- context = payload.get("context") or {}
- context.setdefault("request_context", connection_context.request_context)
+ context = payload.get("context", connection_context.request_context)
return {
"request_string": payload.get("query"),
"variable_values": payload.get("variables"),
|
Add request context directly to the payload rather than a request_context key
|
py
|
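How the two approaches differ, sketched with plain dicts (values are hypothetical):

```python
request_context = {"user": "alice"}

# Old behaviour: the request context was tucked under a
# "request_context" key inside whatever context the payload carried.
payload = {"context": {"lang": "en"}}
context = payload.get("context") or {}
context.setdefault("request_context", request_context)
assert context == {"lang": "en", "request_context": {"user": "alice"}}

# New behaviour: the request context IS the context, but only when
# the payload does not provide one of its own.
assert {"context": {"lang": "en"}}.get("context", request_context) == {"lang": "en"}
assert {}.get("context", request_context) == {"user": "alice"}
```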
diff --git a/jplephem/jplephem/test.py b/jplephem/jplephem/test.py
index <HASH>..<HASH> 100644
--- a/jplephem/jplephem/test.py
+++ b/jplephem/jplephem/test.py
@@ -11,7 +11,10 @@ smaller and more feature-oriented suite can be run with::
import numpy as np
from functools import partial
from jplephem import Ephemeris, DateError
-from unittest import SkipTest, TestCase
+try:
+ from unittest import SkipTest, TestCase
+except ImportError:
+ from unittest2 import SkipTest, TestCase
class Tests(TestCase):
|
Try to make tests importable under Python <I>
|
py
|
diff --git a/bootstrap.py b/bootstrap.py
index <HASH>..<HASH> 100755
--- a/bootstrap.py
+++ b/bootstrap.py
@@ -31,6 +31,7 @@ parser.add_option('--verbose', action='store_true',
help='enable verbose build',)
parser.add_option('--x64', action='store_true',
help='force 64-bit build (Windows)',)
+# TODO: make this --platform to match configure.py.
parser.add_option('--windows', action='store_true',
help='force native Windows build (when using Cygwin Python)',
default=sys.platform.startswith('win32'))
|
add a TODO from a pull request
|
py
|
diff --git a/pyvisa/thirdparty/prettytable.py b/pyvisa/thirdparty/prettytable.py
index <HASH>..<HASH> 100644
--- a/pyvisa/thirdparty/prettytable.py
+++ b/pyvisa/thirdparty/prettytable.py
@@ -41,8 +41,6 @@ import sys
import textwrap
import unicodedata
-import pkg_resources
-
__version__ = "0.7.3.dev.43cdb910a6fbee396e1fceb76a7775fa7314ee1d"
py3k = sys.version_info[0] >= 3
if py3k:
@@ -51,15 +49,16 @@ if py3k:
itermap = map
iterzip = zip
uni_chr = chr
- from html.parser import HTMLParser
from html import escape
+ from html.parser import HTMLParser
else:
itermap = itertools.imap
iterzip = itertools.izip
uni_chr = unichr # noqa: F821
- from HTMLParser import HTMLParser
from cgi import escape
+ from HTMLParser import HTMLParser
+
# hrule styles
FRAME = 0
ALL = 1
|
do not import pkg_resources in prettytable (it is unused!)
|
py
|
diff --git a/MAVProxy/modules/mavproxy_battery.py b/MAVProxy/modules/mavproxy_battery.py
index <HASH>..<HASH> 100644
--- a/MAVProxy/modules/mavproxy_battery.py
+++ b/MAVProxy/modules/mavproxy_battery.py
@@ -32,7 +32,7 @@ class BatteryModule(mp_module.MPModule):
MPSetting('servowarn', float, 4.3, 'Servo voltage warning level'))
self.settings.append(
MPSetting('vccwarn', float, 4.3, 'Vcc voltage warning level'))
- self.settings.append(MPSetting('numcells', int, 0, range=(0,10), increment=1))
+ self.settings.append(MPSetting('numcells', int, 0, range=(0,50), increment=1))
self.battery_period = mavutil.periodic_event(5)
def cmd_bat(self, args):
|
battery: increase max numcells
|
py
|
diff --git a/wallace/models.py b/wallace/models.py
index <HASH>..<HASH> 100644
--- a/wallace/models.py
+++ b/wallace/models.py
@@ -175,6 +175,8 @@ class Node(Base):
if isinstance(other_node, list):
for node in other_node:
self.connect_to(node)
+ elif self.has_connection_to(other_node):
+ print "Warning! {} is already connected to {}, cannot make another vector without killing the old one.".format(self, other_node)
elif self == other_node:
raise(ValueError("{} cannot connect to itself.".format(self)))
elif isinstance(other_node, Source):
|
can't connect twice to the same node
|
py
|
diff --git a/src/python/twitter/pants/targets/python_tests.py b/src/python/twitter/pants/targets/python_tests.py
index <HASH>..<HASH> 100644
--- a/src/python/twitter/pants/targets/python_tests.py
+++ b/src/python/twitter/pants/targets/python_tests.py
@@ -30,11 +30,11 @@ class PythonTests(PythonTarget):
soft_dependencies: Whether or not we should ignore dependency resolution
errors for this test. [Default: False]
"""
- self.add_label('python')
- self.add_label('tests')
self._timeout = timeout
self._soft_dependencies = bool(soft_dependencies)
PythonTarget.__init__(self, name, sources, resources, dependencies)
+ self.add_label('python')
+ self.add_label('tests')
@property
def timeout(self):
|
Allow annotations on thrift typedefs. I have made the corresponding change in the canonical thrift compiler: <URL>
|
py
|
diff --git a/datajoint/version.py b/datajoint/version.py
index <HASH>..<HASH> 100644
--- a/datajoint/version.py
+++ b/datajoint/version.py
@@ -1 +1 @@
-__version__ = "0.12.dev3"
+__version__ = "0.12.dev4"
|
increment release version to <I>.dev4
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ setup(
maintainer_email="[email protected]",
url="http://www.xhtml2pdf.com",
keywords="PDF, HTML, XHTML, XML, CSS",
- install_requires = ["html5lib", "pyPdf2", "Pillow", "reportlab"],
+ install_requires = ["html5lib", "pyPdf2", "Pillow", "reportlab>=2.2,<3.0"],
include_package_data=True,
packages=find_packages(exclude=["tests", "tests.*"]),
# test_suite = "tests", They're not even working yet
|
Quick fix to limit reportlab version range. Version <I>, if installed, will not be detected by xhtml2pdf, so define the reportlab dependency as a range (>=<I>, <<I>).
|
py
|
diff --git a/spyder/utils/iofuncs.py b/spyder/utils/iofuncs.py
index <HASH>..<HASH> 100644
--- a/spyder/utils/iofuncs.py
+++ b/spyder/utils/iofuncs.py
@@ -1,6 +1,6 @@
# -*- coding:utf-8 -*-
#
-# Copyright © The Spyder Development Team
+# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
|
Change copyright to the "Spyder Project Contributors" - This follows recent discussions about the same topic on the Jupyter project.
|
py
|
diff --git a/pyemma/msm/estimators/maximum_likelihood_msm.py b/pyemma/msm/estimators/maximum_likelihood_msm.py
index <HASH>..<HASH> 100644
--- a/pyemma/msm/estimators/maximum_likelihood_msm.py
+++ b/pyemma/msm/estimators/maximum_likelihood_msm.py
@@ -187,8 +187,9 @@ class _MSMEstimator(_Estimator, _MSM):
'Consider using sparse=True.'.format(nstates=dtrajstats.nstates))
# count lagged
+ show_progress = getattr(self, 'show_progress', False)
dtrajstats.count_lagged(self.lag, count_mode=self.count_mode,
- mincount_connectivity=self.mincount_connectivity)
+ mincount_connectivity=self.mincount_connectivity, show_progress=show_progress)
# for other statistics
return dtrajstats
|
[msm_estimator-base] pass show_progress attribute to count function; defaults to False in case there is no such attribute.
|
py
|
diff --git a/engine/handler.py b/engine/handler.py
index <HASH>..<HASH> 100644
--- a/engine/handler.py
+++ b/engine/handler.py
@@ -14,14 +14,6 @@ import importlib
class Handler(tornado.web.RequestHandler):
- _instance = None
-
- def __new__(cls, *args, **kwargs):
- if not cls._instance:
- cls._instance = super(Handler, cls).__new__(cls, *args)
-
- return cls._instance
-
def __init__(self, application, request, **kwargs):
super(Handler, self).__init__(application, request, **kwargs)
|
Handler class is not suited for being a singleton object.
|
py
|
diff --git a/dfttopif/parsers/abinit.py b/dfttopif/parsers/abinit.py
index <HASH>..<HASH> 100644
--- a/dfttopif/parsers/abinit.py
+++ b/dfttopif/parsers/abinit.py
@@ -4,6 +4,7 @@ import glob
from ase.calculators.abinit import Abinit
from pypif.obj.common import Value, Property, Scalar
+
class AbinitParser(DFTParser):
'''
Parser for ABINIT calculations
@@ -12,6 +13,7 @@ class AbinitParser(DFTParser):
def __init__(self, directory):
# Check whether any file has as name ABINIT in the file in the first two lines
+ super(AbinitParser, self).__init__(directory)
files = [f for f in os.listdir(directory) if os.path.isfile(os.path.join(directory, f))]
is_abinit = False
for f in files:
|
Fixed constructor for Abinit: missing call to superclass.
|
py
|
diff --git a/salt/modules/network.py b/salt/modules/network.py
index <HASH>..<HASH> 100644
--- a/salt/modules/network.py
+++ b/salt/modules/network.py
@@ -896,7 +896,7 @@ def ip_addrs6(interface=None, include_loopback=False, cidr=None):
addrs = salt.utils.network.ip_addrs6(interface=interface,
include_loopback=include_loopback)
if cidr:
- return [i for i in addrs if salt.utils.network.ip_in_subnet(cidr, [i])]
+ return [i for i in addrs if salt.utils.network.in_subnet(cidr, [i])]
else:
return addrs
|
Fix the 'cidr' arg in salt.modules.network.ip_addrs6(). The two arguments to ip_in_subnet() should be swapped around, but since ip_in_subnet() is being deprecated in favor of in_subnet(), removing three characters seemed like the most appropriate bugfix. This fixes #<I>
|
py
|
diff --git a/openquake/nrml/__init__.py b/openquake/nrml/__init__.py
index <HASH>..<HASH> 100644
--- a/openquake/nrml/__init__.py
+++ b/openquake/nrml/__init__.py
@@ -28,4 +28,4 @@ def nrml_schema_file():
"""Returns the absolute path to the NRML schema file"""
return os.path.join(
os.path.abspath(os.path.dirname(__file__)),
- 'schema', NRML_SCHEMA_FILE)
+ 'schema', '0.2', NRML_SCHEMA_FILE)
|
Finished moving everything to the <I> folder (and all associated references)
|
py
|
diff --git a/sphinxcontrib/openapi/openapi30.py b/sphinxcontrib/openapi/openapi30.py
index <HASH>..<HASH> 100644
--- a/sphinxcontrib/openapi/openapi30.py
+++ b/sphinxcontrib/openapi/openapi30.py
@@ -371,8 +371,9 @@ def openapihttpdomain(spec, **options):
# https://github.com/OAI/OpenAPI-Specification/blob/3.0.2/versions/3.0.0.md#paths-object
if 'group' in options:
- groups = collections.OrderedDict()
- groups.update({x['name']: [] for x in spec.get('tags', {})})
+ groups = collections.OrderedDict(
+ [(x['name'], []) for x in spec.get('tags', {})]
+ )
for endpoint in options.get('paths', spec['paths']):
for method, properties in spec['paths'][endpoint].items():
|
Fix losing the original order of tag definition
|
py
|
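The reasoning behind the fix: on Pythons before 3.7 a dict comprehension has no guaranteed order, so building one first and feeding it to `update()` scrambled the tags; constructing the `OrderedDict` directly from a list of pairs keeps the spec's order. A sketch with hypothetical tags:

```python
import collections

tags = [{"name": "pets"}, {"name": "store"}, {"name": "users"}]

# A list of (key, value) pairs preserves insertion order on every
# interpreter, unlike a dict comprehension on Python < 3.7.
groups = collections.OrderedDict([(t["name"], []) for t in tags])
assert list(groups) == ["pets", "store", "users"]
```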
diff --git a/haystack_panel/__init__.py b/haystack_panel/__init__.py
index <HASH>..<HASH> 100644
--- a/haystack_panel/__init__.py
+++ b/haystack_panel/__init__.py
@@ -4,7 +4,7 @@
haystack_panel
~~~~~~~~~~~~~~
-:copyright: (c) 2012 by Chris Streeter.
+:copyright: (c) 2014 by Chris Streeter.
:license: See LICENSE for more details.
"""
@@ -19,6 +19,6 @@ except Exception, e:
__title__ = 'haystack_panel'
__author__ = 'Chris Streeter'
-__copyright__ = 'Copyright 2012 Chris Streter'
+__copyright__ = 'Copyright 2014 Chris Streeter'
VERSION = __version__
|
Update copyright date and fix my name
|
py
|
diff --git a/adventure/__main__.py b/adventure/__main__.py
index <HASH>..<HASH> 100644
--- a/adventure/__main__.py
+++ b/adventure/__main__.py
@@ -14,12 +14,19 @@ def baudout(s):
stdout.write(c)
stdout.flush()
-game = Game()
-load_advent_dat(game)
-game.start()
-baudout(game.output)
-while not game.is_finished:
- line = input('> ')
- words = re.findall(r'\w+', line)
- if words:
- baudout(game.do_command(words))
+def loop():
+ game = Game()
+ load_advent_dat(game)
+ game.start()
+ baudout(game.output)
+
+ while not game.is_finished:
+ line = input('> ')
+ words = re.findall(r'\w+', line)
+ if words:
+ baudout(game.do_command(words))
+
+try:
+ loop()
+except EOFError:
+ pass
|
Made it possible to cleanly exit the custom prompt with control-D.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,8 @@ setup(name='pyinter',
description="An interval package which deals with open, closed or half open intervals.",
long_description="""\
Another Python package with deals with interval arithmetic, this one hopes to be useful.""",
- classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
+ classifiers=['Development Status :: 3 - Alpha',
+ 'License :: OSI Approved :: MIT License'], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='interval range discontinous-range union intersection',
author='Inti Ocean',
author_email='[email protected]',
|
Add classifiers to setup.py
|
py
|
diff --git a/great_expectations/render/renderer/content_block/validation_results_table_content_block.py b/great_expectations/render/renderer/content_block/validation_results_table_content_block.py
index <HASH>..<HASH> 100644
--- a/great_expectations/render/renderer/content_block/validation_results_table_content_block.py
+++ b/great_expectations/render/renderer/content_block/validation_results_table_content_block.py
@@ -381,6 +381,8 @@ class ValidationResultsTableContentBlockRenderer(ExpectationStringRenderer):
)
except KeyError:
return "unknown % null"
+ except TypeError:
+ return "NaN% null"
elif expectation_type == "expect_column_values_to_not_be_null":
try:
null_percent = result["unexpected_percent"]
@@ -390,6 +392,8 @@ class ValidationResultsTableContentBlockRenderer(ExpectationStringRenderer):
)
except KeyError:
return "unknown % not null"
+ except TypeError:
+ return "NaN% not null"
elif result.get("unexpected_percent") is not None:
return (
num_to_str(result.get("unexpected_percent"), precision=5)
|
[ENHANCEMENT] Show NaN when null percentage is undefined (#<I>)
|
py
|
diff --git a/ldapdb/backends/ldap/base.py b/ldapdb/backends/ldap/base.py
index <HASH>..<HASH> 100644
--- a/ldapdb/backends/ldap/base.py
+++ b/ldapdb/backends/ldap/base.py
@@ -95,7 +95,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
if self.connection is None:
self.connection = ldap.initialize(self.settings_dict['NAME'])
- options = self.settings_dict.get('CONNECTION_OPTIONS', [])
+ options = self.settings_dict.get('CONNECTION_OPTIONS', {})
for opt, value in options.items():
self.connection.set_option(opt, value)
|
CONNECTION_OPTIONS is a dict, not a list
|
py
|
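Why the default's type matters: the loop calls `.items()` on whatever `.get()` returns, so a list default blows up exactly when the setting is absent. A minimal sketch:

```python
settings_dict = {}  # CONNECTION_OPTIONS not configured

options = settings_dict.get('CONNECTION_OPTIONS', [])
# options.items()  # AttributeError: 'list' object has no attribute 'items'

options = settings_dict.get('CONNECTION_OPTIONS', {})
for opt, value in options.items():  # empty dict: loop body is skipped
    pass
```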
diff --git a/system_tests/test_default.py b/system_tests/test_default.py
index <HASH>..<HASH> 100644
--- a/system_tests/test_default.py
+++ b/system_tests/test_default.py
@@ -24,7 +24,5 @@ def test_application_default_credentials(verify_refresh):
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
- else:
- assert project_id is None
verify_refresh(credentials)
|
Fix system tests when running on GCE. The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.
|
py
|
diff --git a/pymux/key_mappings.py b/pymux/key_mappings.py
index <HASH>..<HASH> 100644
--- a/pymux/key_mappings.py
+++ b/pymux/key_mappings.py
@@ -57,6 +57,11 @@ def prompt_toolkit_key_to_vt100_key(key, application_mode=False):
Keys.Down: '\x1bOB',
}
+ if key == Keys.ControlJ:
+ # Required for redis-cli. This can be removed when prompt_toolkit stops
+ # replacing \r by \n.
+ return '\r'
+
if key == '\n':
return '\r'
|
Send \r instead of \n to the application when enter has been pressed.
|
py
|
diff --git a/api.py b/api.py
index <HASH>..<HASH> 100644
--- a/api.py
+++ b/api.py
@@ -95,7 +95,7 @@ class Results(object):
body={
'size': 9999999,
'fields': ['control_number'],
- 'query': self.query
+ 'query': self.body.get("query")
}
)
return intbitset([int(r['_id']) for r in results['hits']['hits']])
|
search: response recids fix * Fixes an exception when calling `Results.recids`. (closes #<I>)
|
py
|
diff --git a/telemetry/telemetry/core/platform/profiler/iprofiler_profiler.py b/telemetry/telemetry/core/platform/profiler/iprofiler_profiler.py
index <HASH>..<HASH> 100644
--- a/telemetry/telemetry/core/platform/profiler/iprofiler_profiler.py
+++ b/telemetry/telemetry/core/platform/profiler/iprofiler_profiler.py
@@ -13,7 +13,10 @@ from telemetry.core.platform import profiler
sys.path.append(os.path.join(
os.path.abspath(os.path.dirname(__file__)), '..', '..', '..', '..', '..',
'..', 'third_party', 'pexpect'))
-import pexpect # pylint: disable=F0401
+try:
+ import pexpect # pylint: disable=F0401
+except ImportError:
+ pass
class IprofilerProfiler(profiler.Profiler):
|
[Telemetry] Win build fix: try-except pexpect import This fixes an import error on windows. BUG=None TEST=moz page cycler on windows NOTRY=True TBR=<EMAIL> Review URL: <URL>
|
py
|
diff --git a/cumulusci/core/keychain.py b/cumulusci/core/keychain.py
index <HASH>..<HASH> 100644
--- a/cumulusci/core/keychain.py
+++ b/cumulusci/core/keychain.py
@@ -63,11 +63,15 @@ class BaseProjectKeychain(BaseConfig):
def create_scratch_org(self, org_name, config_name, days=None):
""" Adds/Updates a scratch org config to the keychain from a named config """
scratch_config = getattr(self.project_config, 'orgs__scratch__{}'.format(config_name))
- scratch_config.setdefault('days', 7)
+ if days is not None:
+ # Allow override of scratch config's default days
+ scratch_config['days'] = days
+ else:
+ # Use scratch config days or default of 1 day
+ scratch_config.setdefault('days', 1)
scratch_config['scratch'] = True
scratch_config.setdefault('namespaced', False)
scratch_config['config_name'] = config_name
- scratch_config['days'] = days
scratch_config['sfdx_alias'] = '{}__{}'.format(
self.project_config.project__name,
org_name,
|
Fix bug in setting days in create_scratch_org
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -35,7 +35,7 @@ requirements = [
# Protocol and data packages
"pytmpdir >= 0.2.3", # A temporary directory, useful for extracting archives to
"txhttputil >= 0.2.7", # Utility class for http requests
- "vortexpy >= 0.10.0", # Data serialisation and transport layer, observable based
+ "vortexpy >= 0.11.0", # Data serialisation and transport layer, observable based
# SOAP interface packages
"SOAPpy-py3 >= 0.52.24", # See http://soappy.ooz.ie for tutorials
|
Updated to vortexpy==<I>
|
py
|
diff --git a/frigg_worker/jobs.py b/frigg_worker/jobs.py
index <HASH>..<HASH> 100644
--- a/frigg_worker/jobs.py
+++ b/frigg_worker/jobs.py
@@ -90,9 +90,8 @@ class Build(object):
if not self.clone_repo():
return self.error('git clone', 'Access denied')
- self.start_services()
-
try:
+ self.start_services()
self.finished = False
self.create_pending_tasks()
for task in self.settings['tasks']:
|
Move start_services within the try/except block; build_settings throws errors that need to be caught.
|
py
|
diff --git a/salt/renderers/yaml.py b/salt/renderers/yaml.py
index <HASH>..<HASH> 100644
--- a/salt/renderers/yaml.py
+++ b/salt/renderers/yaml.py
@@ -47,7 +47,7 @@ def render(yaml_data, saltenv='base', sls='', argline='', **kws):
try:
data = load(yaml_data, Loader=get_yaml_loader(argline))
except ScannerError as exc:
- err_type = _ERROR_MAP.get(exc.problem, 'Unknown yaml render error')
+ err_type = _ERROR_MAP.get(exc.problem, exc.problem)
line_num = exc.problem_mark.line + 1
raise SaltRenderError(err_type, line_num, exc.problem_mark.buffer)
except ConstructorError as exc:
|
Always let the real YAML error through The real message is always (probably) going to be more helpful than "Unknown".
|
py
|
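The `dict.get(key, key)` idiom at work: mapped problems get the friendly name, and everything else falls through verbatim instead of becoming "Unknown". A sketch with a hypothetical error map:

```python
_ERROR_MAP = {"found character '\\t' that cannot start any token":
              'Illegal tab character'}

problem = 'mapping values are not allowed here'   # not in the map
err_type = _ERROR_MAP.get(problem, problem)
assert err_type == problem                        # real message survives
```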
diff --git a/pyrax/clouddns.py b/pyrax/clouddns.py
index <HASH>..<HASH> 100644
--- a/pyrax/clouddns.py
+++ b/pyrax/clouddns.py
@@ -971,7 +971,7 @@ class CloudDNSManager(BaseManager):
class CloudDNSClient(BaseClient):
"""
- This is the primary class for interacting with Cloud Databases.
+ This is the primary class for interacting with Cloud DNS.
"""
def _configure_manager(self):
"""
|
Fixed a typo found by joeracker. Issue #<I>.
|
py
|
diff --git a/django_bouncy/utils.py b/django_bouncy/utils.py
index <HASH>..<HASH> 100644
--- a/django_bouncy/utils.py
+++ b/django_bouncy/utils.py
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
"""Utility functions for the django_bouncy app"""
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
try:
import urllib2 as urllib
except ImportError:
|
do not use unicode_literals; rather, mark strings as unicode explicitly
|
py
|
diff --git a/reservations/reservations.py b/reservations/reservations.py
index <HASH>..<HASH> 100755
--- a/reservations/reservations.py
+++ b/reservations/reservations.py
@@ -66,14 +66,13 @@ if res_posts:
try:
with open(resfile, 'r') as f:
local_data = f.read()
- except:
- with open(resfile, 'w') as f:
- f.write(reservations)
+ except: pass
else:
if local_data.strip() != reservations.strip():
Print("Warning: local file and forum post do not match!")
- Print("Continuing using local data, any new information in the forum post will be lost!")
- Print("Delete the local file to use the forum post as a working base.")
+ Print("Continuing using forum data, local changes are being lost!")
+ with open(resfile, 'w') as f:
+ f.write(reservations)
################################################################################
# Begin class and function definitions, the remaining top-level logic is at the very bottom
|
Ignore any local changes, not configurable as yet
|
py
|
diff --git a/dbaas_zabbix/dbaas_api.py b/dbaas_zabbix/dbaas_api.py
index <HASH>..<HASH> 100644
--- a/dbaas_zabbix/dbaas_api.py
+++ b/dbaas_zabbix/dbaas_api.py
@@ -55,6 +55,8 @@ class DatabaseAsAServiceApi(object):
@property
def hosts(self):
+ if self.using_agent:
+ return []
return list({instance.hostname for instance in self.instances})
@property
@@ -91,3 +93,10 @@ class DatabaseAsAServiceApi(object):
if organization:
return organization.get_grafana_hostgroup_external_org()
return None
+
+ @property
+ def using_agent(self):
+ zabbix_agent = self.credentials.get_parameter_by_name("zabbix_agent")
+ if zabbix_agent.lower == 'true':
+ return True
+ return False
|
do not list hosts when zabbix agent is enabled
|
py
|
diff --git a/django_x509/base/admin.py b/django_x509/base/admin.py
index <HASH>..<HASH> 100644
--- a/django_x509/base/admin.py
+++ b/django_x509/base/admin.py
@@ -3,7 +3,6 @@ from django.conf.urls import url
from django.contrib.admin import ModelAdmin
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render
-from django.templatetags.static import static
from django.urls import reverse
from django.utils.html import format_html
from django.utils.translation import ngettext
@@ -50,7 +49,7 @@ class BaseAdmin(ModelAdmin):
]
class Media:
- css = {'all': (static('django-x509/css/admin.css'),)}
+ css = {'all': ('django-x509/css/admin.css',)}
def __init__(self, *args, **kwargs):
self.readonly_fields += ('created', 'modified')
|
[fix] Removed static() call from admin media Related to <URL>
|
py
|
diff --git a/cherry_picker/cherry_picker/cherry_picker.py b/cherry_picker/cherry_picker/cherry_picker.py
index <HASH>..<HASH> 100755
--- a/cherry_picker/cherry_picker/cherry_picker.py
+++ b/cherry_picker/cherry_picker/cherry_picker.py
@@ -202,7 +202,7 @@ Co-authored-by: {get_author_info_from_short_sha(self.commit_sha1)}"""
def push_to_remote(self, base_branch, head_branch, commit_message=""):
""" git push <origin> <branchname> """
- cmd = ['git', 'push', self.pr_remote, head_branch]
+ cmd = ['git', 'push', self.pr_remote, f'{head_branch}:{head_branch}']
try:
self.run_cmd(cmd)
except subprocess.CalledProcessError:
|
Explicit push location to ignore user git config (#<I>) Users can configure what strategy "git push" uses to determine which remote branch it should push to. Cherry-picker doesn't work with all of the git push strategies but we can make explicit what the remote branch should be which works around that problem.
|
py
|
diff --git a/bigchaindb/commands/utils.py b/bigchaindb/commands/utils.py
index <HASH>..<HASH> 100644
--- a/bigchaindb/commands/utils.py
+++ b/bigchaindb/commands/utils.py
@@ -198,6 +198,7 @@ base_parser.add_argument('-c', '--config',
'(use "-" for stdout)')
base_parser.add_argument('-l', '--log-level',
+ type=lambda l: l.upper(), # case insensitive conversion
choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
default='INFO',
help='Log level')
|
Treat --log-level argument as case-insensitive
|
py
|
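This works because argparse applies the `type` callable before validating against `choices`, so any casing of the input is normalised first. A runnable sketch:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-l', '--log-level',
                    type=lambda l: l.upper(),  # runs before the choices check
                    choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
                    default='INFO')

assert parser.parse_args(['-l', 'debug']).log_level == 'DEBUG'
```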
diff --git a/gns3server/modules/vmware/vmware_vm.py b/gns3server/modules/vmware/vmware_vm.py
index <HASH>..<HASH> 100644
--- a/gns3server/modules/vmware/vmware_vm.py
+++ b/gns3server/modules/vmware/vmware_vm.py
@@ -437,7 +437,10 @@ class VMwareVM(BaseVM):
log.debug("enabling remaining adapter {}".format(adapter_number))
self._vmx_pairs["ethernet{}.startconnected".format(adapter_number)] = "TRUE"
- self.manager.write_vmx_file(self._vmx_path, self._vmx_pairs)
+ try:
+ self.manager.write_vmx_file(self._vmx_path, self._vmx_pairs)
+ except OSError as e:
+ raise VMwareError('Could not write VMware VMX file "{}": {}'.format(self._vmx_path, e))
log.info("VMware VM '{name}' [{id}] stopped".format(name=self.name, id=self.id))
|
Catch Permission denied when writing to VMX file while closing VMware VM. Fixes #<I>.
|
py
|
diff --git a/src/argcmdr.py b/src/argcmdr.py
index <HASH>..<HASH> 100644
--- a/src/argcmdr.py
+++ b/src/argcmdr.py
@@ -1,6 +1,7 @@
import argcomplete
import argparse
import collections
+import collections.abc
import enum
import functools
import importlib
@@ -254,7 +255,7 @@ class Command:
args.__parser__.print_usage()
def __getitem__(self, key):
- if isinstance(key, (str, bytes)) or not isinstance(key, collections.Sequence):
+ if isinstance(key, (str, bytes)) or not isinstance(key, collections.abc.Sequence):
return self.__getitem__((key,))
if not key:
|
fix for Python <I>: Sequence moved: collections -> collections.abc (...and copied a while back!)
|
py
|
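Context for the fix: the ABC aliases in the `collections` namespace were deprecated since Python 3.3 and removed in 3.10; `collections.abc` is the canonical home. A quick check:

```python
import collections.abc

assert isinstance((1, 2), collections.abc.Sequence)
assert isinstance("abc", collections.abc.Sequence)    # str is a Sequence too
assert not isinstance({1, 2}, collections.abc.Sequence)
```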
diff --git a/tests/test_project.py b/tests/test_project.py
index <HASH>..<HASH> 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -151,7 +151,6 @@ class TestProject:
def test_delete(self, session, network_with_data):
net = network_with_data
- print session.dirty
project_id = net.project_id
log.info("Purging project %s", project_id)
res = hb.delete_project(project_id, user_id=pytest.root_user_id)
|
Fix print statement in tests causing failures
|
py
|
diff --git a/hpOneView/metrics.py b/hpOneView/metrics.py
index <HASH>..<HASH> 100644
--- a/hpOneView/metrics.py
+++ b/hpOneView/metrics.py
@@ -53,7 +53,7 @@ class metrics(object):
self._con = con
self._activity = activity(con)
- def get_metrics_capabilty(self):
+ def get_metrics_capability(self):
body = self._con.get(uri['metricsCapabilities'])
return body
@@ -61,9 +61,9 @@ class metrics(object):
body = self._con.get(uri['metricsConfiguration'])
return body
- def set_metrics_configuration(self, MetricsConfig, blocking=True,
+ def set_metrics_configuration(self, metrics_config, blocking=True,
verbose=False):
- task, body = self._con.put(uri['metricsConfiguration'], MetricsConfig)
+ task, body = self._con.put(uri['metricsConfiguration'], metrics_config)
if blocking is True:
task = self._activity.wait4task(task, tout=600, verbose=verbose)
return body
|
Update metrics.py Fix typo and variable casing.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,16 +2,19 @@
from setuptools import setup
+
+version = '0.4.5'
+
setup(
name='upcloud-api',
- version='0.4.5',
+ version=version,
description='UpCloud API Client',
author='Elias Nygren',
maintainer='Mika Lackman',
maintainer_email='[email protected]',
url='https://github.com/UpCloudLtd/upcloud-python-api',
packages=['upcloud_api', 'upcloud_api.cloud_manager'],
- download_url='https://github.com/UpCloudLtd/upcloud-python-api/archive/0.4.5.tar.gz',
+ download_url='https://github.com/UpCloudLtd/upcloud-python-api/archive/%s.tar.gz' % version,
license='MIT',
install_requires=[
'requests>=2.6.0',
|
setup.py: move version to a variable
|
py
|
diff --git a/polyaxon/scheduler/spawners/tensorboard_spawner.py b/polyaxon/scheduler/spawners/tensorboard_spawner.py
index <HASH>..<HASH> 100644
--- a/polyaxon/scheduler/spawners/tensorboard_spawner.py
+++ b/polyaxon/scheduler/spawners/tensorboard_spawner.py
@@ -76,10 +76,6 @@ class TensorboardSpawner(ProjectJobSpawner):
return volumes, volume_mounts
- @staticmethod
- def fii():
- import json; data = json.loads(open('k.json').read()); content = []; for k in data: content.append('export {}={}'.format(k, data[k])); output = open('somefile.txt', 'w'); output.write('\n'.join(content)); output.close()
-
@classmethod
def get_stores_secrets_command_args(cls, stores_secrets):
"""Create an auth command for S3 and GCS."""
|
Fix tensorboard spawner
|
py
|
diff --git a/pcapfile/protocols/transport/tcp.py b/pcapfile/protocols/transport/tcp.py
index <HASH>..<HASH> 100644
--- a/pcapfile/protocols/transport/tcp.py
+++ b/pcapfile/protocols/transport/tcp.py
@@ -22,9 +22,7 @@ class TCP(ctypes.Structure):
('syn', ctypes.c_bool), # SYN
('fin', ctypes.c_bool), # FIN
('win', ctypes.c_ushort), # window size
- ('sum', ctypes.c_ushort), # checksum
- ('opt', ctypes.c_char_p), # options
- ('payload', ctypes.c_char_p)] # packet payload
+ ('sum', ctypes.c_ushort)] # checksum
tcp_min_header_size = 20
@@ -53,8 +51,8 @@ class TCP(ctypes.Structure):
self.opt = b''
self.payload = b''
else:
- self.opt = ctypes.c_char_p(packet[20:self.data_offset])
- self.payload = ctypes.c_char_p(packet[self.data_offset:])
+ self.opt = packet[20:self.data_offset]
+ self.payload = packet[self.data_offset:]
def __str__(self):
packet = 'tcp %s packet from port %d to port %d carrying %d bytes'
|
Fixed decoding of very short TCP segments
|
py
|
diff --git a/b2handle/tests/main_test_script.py b/b2handle/tests/main_test_script.py
index <HASH>..<HASH> 100644
--- a/b2handle/tests/main_test_script.py
+++ b/b2handle/tests/main_test_script.py
@@ -20,7 +20,7 @@ from handleconnector_access_patched_test import EUDATHandleConnectorAccessPatche
# Logging:
log_b2handle = False
if log_b2handle == True:
- LOGGER = logging.getLogger('b2handle.handleclient')
+ LOGGER = logging.getLogger()
LOGGER.setLevel("DEBUG")
LOGGER.addHandler(
logging.FileHandler(
|
If logging is switched on during tests, all library modules get logged now, not just the client module.
|
py
|
diff --git a/holoviews/operation/element.py b/holoviews/operation/element.py
index <HASH>..<HASH> 100644
--- a/holoviews/operation/element.py
+++ b/holoviews/operation/element.py
@@ -548,16 +548,17 @@ class histogram(Operation):
else:
edges = np.linspace(hist_range[0], hist_range[1], self.p.num_bins + 1)
normed = False if self.p.mean_weighted and self.p.weight_dimension else self.p.normed
- try:
- hist, edges = np.histogram(data[np.isfinite(data)], normed=normed,
- range=hist_range, weights=weights, bins=edges)
+
+ data = data[np.isfinite(data)]
+ if len(data):
+ hist, edges = np.histogram(data, normed=normed, range=hist_range,
+ weights=weights, bins=edges)
if not normed and self.p.weight_dimension and self.p.mean_weighted:
- hist_mean, _ = np.histogram(data[np.isfinite(data)], normed=normed,
+ hist_mean, _ = np.histogram(data, normed=normed,
range=hist_range, bins=self.p.num_bins)
hist /= hist_mean
- except:
+ else:
hist = np.zeros(self.p.num_bins)
-
hist[np.isnan(hist)] = 0
params = {}
|
Suppress warning when data empty in histogram operation (#<I>)
|
py
|
diff --git a/shapefile.py b/shapefile.py
index <HASH>..<HASH> 100644
--- a/shapefile.py
+++ b/shapefile.py
@@ -64,10 +64,10 @@ if PYTHON3:
# Error.
raise Exception('Unknown input type')
- def u(v, encoding='utf-8'):
+ def u(v, encoding='utf-8', encodingErrors='strict'):
if isinstance(v, bytes):
# For python 3 decode bytes to str.
- return v.decode(encoding)
+ return v.decode(encoding, encodingErrors)
elif isinstance(v, str):
# Already str.
return v
@@ -301,6 +301,7 @@ class Reader:
self.fields = []
self.__dbfHdrLength = 0
self.encoding = kwargs.pop('encoding', 'utf-8')
+ self.encodingErrors = kwargs.pop('encodingErrors', 'strict')
# See if a shapefile name was passed as an argument
if len(args) > 0:
if is_string(args[0]):
@@ -635,7 +636,7 @@ class Reader:
value = None # unknown value is set to missing
else:
# anything else is forced to string/unicode
- value = u(value, self.encoding)
+ value = u(value, self.encoding, self.encodingErrors)
value = value.strip()
record.append(value)
return record
|
Added user control of how to handle encodingErrors
|
py
|
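The new `encodingErrors` knob maps onto the standard `errors` argument of `bytes.decode`: 'strict' raises, while other handlers degrade gracefully. A sketch with hypothetical DBF bytes:

```python
raw = b"Caf\xe9"  # latin-1 bytes, invalid as UTF-8

try:
    raw.decode('utf-8')                # errors='strict' is the default
except UnicodeDecodeError as exc:
    print(exc)

print(raw.decode('utf-8', 'replace'))  # 'Caf\ufffd'
print(raw.decode('utf-8', 'ignore'))   # 'Caf'
```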
diff --git a/seleniumbase/fixtures/base_case.py b/seleniumbase/fixtures/base_case.py
index <HASH>..<HASH> 100755
--- a/seleniumbase/fixtures/base_case.py
+++ b/seleniumbase/fixtures/base_case.py
@@ -4453,10 +4453,10 @@ class BaseCase(unittest.TestCase):
will still be taken after the last step of your tearDown(), where
you should be calling "super(SubClassOfBaseCase, self).tearDown()"
"""
- test_id = self.__get_test_id()
- test_logpath = self.log_path + "/" + test_id
- self.__create_log_path_as_needed(test_logpath)
if self.__has_exception() or self.save_screenshot_after_test:
+ test_id = self.__get_test_id()
+ test_logpath = self.log_path + "/" + test_id
+ self.__create_log_path_as_needed(test_logpath)
self.__set_last_page_screenshot()
if self.is_pytest:
self.__add_pytest_html_extra()
|
Prevent unused log folders from getting created
|
py
|
diff --git a/bika/lims/browser/analysisrequest.py b/bika/lims/browser/analysisrequest.py
index <HASH>..<HASH> 100644
--- a/bika/lims/browser/analysisrequest.py
+++ b/bika/lims/browser/analysisrequest.py
@@ -142,12 +142,16 @@ class AnalysisRequestWorkflowAction(WorkflowAction):
new = ar.setAnalyses(objects.keys(), prices = prices)
# link analyses and partitions
- for service_uid, service in objects.items():
- part_id = form['Partition'][0][service_uid]
- part = sample[part_id]
- analysis = ar[service.getKeyword()]
- analysis.setSamplePartition(part)
- analysis.reindexObject()
+ # If Bika Setup > Analyses > 'Display individual sample
+ # partitions' is checked, no Partitions available.
+ # https://github.com/bikalabs/Bika-LIMS/issues/1030
+ if 'Partition' in form:
+ for service_uid, service in objects.items():
+ part_id = form['Partition'][0][service_uid]
+ part = sample[part_id]
+ analysis = ar[service.getKeyword()]
+ analysis.setSamplePartition(part)
+ analysis.reindexObject()
if new:
ar_state = workflow.getInfoFor(ar, 'review_state')
|
Fix #<I> Error when submitting AR's "Manage Analysis View" when 'Display individual sample partitions' is unchecked in Bika Setup
|
py
|
diff --git a/bonobo/logging.py b/bonobo/logging.py
index <HASH>..<HASH> 100644
--- a/bonobo/logging.py
+++ b/bonobo/logging.py
@@ -16,7 +16,8 @@ def get_format():
yield '{b}][{w}'.join(('%(spent)04d', '%(name)s'))
yield '{b}]'
yield ' %(fg)s%(message)s{r}'
- yield CLEAR_EOL
+ if not iswindows:
+ yield CLEAR_EOL
colors = {
|
[logging] Removes kill-until-eol character on windows platform.
|
py
|
diff --git a/master/buildbot/test/util/steps.py b/master/buildbot/test/util/steps.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/util/steps.py
+++ b/master/buildbot/test/util/steps.py
@@ -105,6 +105,9 @@ class BuildStepMixin:
"""
def setUpBuildStep(self):
+ if not hasattr(self, 'reactor'):
+ raise Exception('Reactor has not yet been setup for step')
+
# make an (admittedly global) reference to this test case so that
# the fakes can call back to us
remotecommand.FakeRemoteCommand.testcase = self
|
test: Require fake reactor to be setup when using BuildStepMixin
|
py
|
diff --git a/alerta/app/auth.py b/alerta/app/auth.py
index <HASH>..<HASH> 100644
--- a/alerta/app/auth.py
+++ b/alerta/app/auth.py
@@ -121,7 +121,10 @@ def google():
r = requests.get(people_api_url, headers=headers)
profile = json.loads(r.text)
- token = create_token(profile['sub'], profile['name'], profile['email'], provider='google')
+ try:
+ token = create_token(profile['sub'], profile['name'], profile['email'], provider='google')
+ except KeyError:
+ return jsonify(status="error", message="Google+ API is not enabled for this Client ID")
return jsonify(token=token)
|
catch exception when Google+ API is not enabled
|
py
|
diff --git a/fmn/rules/utils.py b/fmn/rules/utils.py
index <HASH>..<HASH> 100644
--- a/fmn/rules/utils.py
+++ b/fmn/rules/utils.py
@@ -148,7 +148,10 @@ def _get_pkgdb2_packages_for(config, username):
data = req.json()
- packages_of_interest = data['point of contact'] + data['co-maintained']
+ packages_of_interest = \
+ data['point of contact'] + \
+ data['co-maintained'] + \
+ data['watch']
packages_of_interest = set([p['name'] for p in packages_of_interest])
log.debug("done talking with pkgdb2 for now. %0.2fs", time.time() - start)
return packages_of_interest
|
Add watchcommits/watchbugs to the package-ownership fmn rule.
|
py
|
diff --git a/address/models.py b/address/models.py
index <HASH>..<HASH> 100644
--- a/address/models.py
+++ b/address/models.py
@@ -234,16 +234,17 @@ class AddressField(models.ForeignKey):
description = 'An address'
def __init__(self, **kwargs):
- super(AddressField, self).__init__(Address, **kwargs)
+ kwargs['to'] = Address
+ super(AddressField, self).__init__(**kwargs)
def contribute_to_class(self, cls, name, virtual_only=False):
super(ForeignObject, self).contribute_to_class(cls, name, virtual_only=virtual_only)
setattr(cls, self.name, AddressDescriptor(self))
- def deconstruct(self):
- name, path, args, kwargs = super(AddressField, self).deconstruct()
- del kwargs['to']
- return name, path, args, kwargs
+ # def deconstruct(self):
+ # name, path, args, kwargs = super(AddressField, self).deconstruct()
+ # del kwargs['to']
+ # return name, path, args, kwargs
def formfield(self, **kwargs):
from forms import AddressField as AddressFormField
|
Still wrapping my head around the migrations code. An error was reported about failing migrations, the issue is to do with my automatically setting the "to" relation of `AddressField` to the `Address` model. This is a workaround.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -56,6 +56,8 @@ setup(
'apio': [
'commands/*.py',
'managers/*.py',
+ 'resources/ecp5/*',
+ 'resources/ice40/*',
'resources/*'
]
},
|
Fix ecp5 and ice<I> definitions in resources dir
|
py
|
diff --git a/pydsl/Grammar/Symbol.py b/pydsl/Grammar/Symbol.py
index <HASH>..<HASH> 100644
--- a/pydsl/Grammar/Symbol.py
+++ b/pydsl/Grammar/Symbol.py
@@ -139,6 +139,9 @@ class UnknownSymbol(TerminalSymbol):
def __eq__(self, other):
return isinstance(other, UnknownSymbol)
+ def check(self, data):
+ return bool(data)
+
class NullSymbol(Symbol):
def __init__(self):
Symbol.__init__(self, "Null", 100)
|
added check() to UnknownSymbol
|
py
|
diff --git a/napalm/base/mock.py b/napalm/base/mock.py
index <HASH>..<HASH> 100644
--- a/napalm/base/mock.py
+++ b/napalm/base/mock.py
@@ -86,11 +86,12 @@ class MockDevice(object):
self.profile = profile
def run_commands(self, commands):
- """Only useful for EOS"""
- if "eos" in self.profile:
- return list(self.parent.cli(commands).values())[0]
- else:
- raise AttributeError("MockedDriver instance has not attribute '_rpc'")
+ """Mock for EOS"""
+ return list(self.parent.cli(commands).values())[0]
+
+ def show(self, command):
+ """Mock for nxos"""
+ return self.run_commands([command])
class MockDriver(NetworkDriver):
|
Added mocking method for nxos
|
py
|
diff --git a/firestore/tests/unit/test__helpers.py b/firestore/tests/unit/test__helpers.py
index <HASH>..<HASH> 100644
--- a/firestore/tests/unit/test__helpers.py
+++ b/firestore/tests/unit/test__helpers.py
@@ -14,6 +14,7 @@
import collections
import datetime
+import sys
import unittest
import mock
@@ -673,6 +674,8 @@ class Test_decode_value(unittest.TestCase):
value = _value_pb(double_value=float_val)
self.assertEqual(self._call_fut(value), float_val)
+ @unittest.skipIf((3,) <= sys.version_info < (3,4,4),
+ 'known datetime bug (bpo-23517) in Python')
def test_datetime(self):
from google.protobuf import timestamp_pb2
from google.cloud._helpers import UTC
@@ -811,6 +814,8 @@ class Test_decode_dict(unittest.TestCase):
return decode_dict(value_fields, client)
+ @unittest.skipIf((3,) <= sys.version_info < (3,4,4),
+ 'known datetime bug (bpo-23517) in Python')
def test_many_types(self):
from google.protobuf import struct_pb2
from google.protobuf import timestamp_pb2
|
Skip tests that hit known bug in Python pre-<I> interpreters. (#<I>)
|
py
|
diff --git a/pyvisa/constants.py b/pyvisa/constants.py
index <HASH>..<HASH> 100644
--- a/pyvisa/constants.py
+++ b/pyvisa/constants.py
@@ -1077,7 +1077,7 @@ class WireMode(enum.IntEnum):
@enum.unique
-class ControlFlow(enum.IntEnum):
+class ControlFlow(enum.IntFlag):
"""Control flow for a serial resource."""
none = VI_ASRL_FLOW_NONE
|
Update IntEnum to IntFlag for ControlFlow. Per suggestion in pyvisa-py issue #<I>, updating the ControlFlow enum from IntEnum to IntFlag.
|
py
|
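Why `IntFlag` fits here: serial flow-control values are bit masks that get OR-ed together, and `IntFlag` members stay members under bitwise operators while `IntEnum` members decay to plain ints. A sketch with hypothetical values standing in for the VI_ASRL_FLOW_* constants:

```python
import enum

class ControlFlow(enum.IntFlag):
    none = 0
    xon_xoff = 1
    rts_cts = 2
    dtr_dsr = 4

combined = ControlFlow.xon_xoff | ControlFlow.rts_cts
assert isinstance(combined, ControlFlow)   # still a ControlFlow, value 3
assert ControlFlow.rts_cts in combined     # membership test works on flags
```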
diff --git a/tests/loader_test.py b/tests/loader_test.py
index <HASH>..<HASH> 100644
--- a/tests/loader_test.py
+++ b/tests/loader_test.py
@@ -32,6 +32,9 @@ class LoaderTestCase(object):
yield
@pytest.fixture(autouse=True)
+ def content_to_file(self):
+ self.write_content_to_file()
+
def write_content_to_file(self, content=None):
content = content or self.content
if not content:
|
stop calling a fixture as a function by refactoring it into an autouse fixture that calls a plain function, which can still be called separately
|
py
|
diff --git a/pyqode/core/api/encodings.py b/pyqode/core/api/encodings.py
index <HASH>..<HASH> 100644
--- a/pyqode/core/api/encodings.py
+++ b/pyqode/core/api/encodings.py
@@ -102,6 +102,9 @@ def convert_to_codec_key(value):
:param value: value to convert.
"""
+ if not value:
+ # fallback to utf-8
+ value = 'UTF-8'
# UTF-8 -> utf_8
converted = value.replace('-', '_').lower()
# fix some corner cases, see https://github.com/pyQode/pyQode/issues/11
|
Fall back to utf-8 if value is None. Fixes issue #<I>
|
py
|
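A minimal sketch of the guarded conversion: without the fallback, a `None` value would crash on the `.replace()` call below:

```python
def convert_to_codec_key(value):
    if not value:
        value = 'UTF-8'   # None (or '') would raise AttributeError below
    return value.replace('-', '_').lower()

assert convert_to_codec_key(None) == 'utf_8'
assert convert_to_codec_key('UTF-8') == 'utf_8'
```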
diff --git a/tests_dweepy.py b/tests_dweepy.py
index <HASH>..<HASH> 100644
--- a/tests_dweepy.py
+++ b/tests_dweepy.py
@@ -37,6 +37,24 @@ def check_valid_get_response(testcase, dweets):
class BaseTestCase(unittest.TestCase):
+ def assertDictEqual(self, a, b, *args, **kwargs):
+ """Python < v2.7 compatibility. Assert 'a' > 'b'"""
+ try:
+ f = super(BaseTestCase, self).assertDictEqual
+ except AttributeError:
+ self.assertTrue(a == b, *args, **kwargs)
+ else:
+ f(a, b, *args, **kwargs)
+
+ def assertGreater(self, a, b, *args, **kwargs):
+ """Python < v2.7 compatibility. Assert 'a' > 'b'"""
+ try:
+ f = super(BaseTestCase, self).assertGreater
+ except AttributeError:
+ self.assertTrue(a > b, *args, **kwargs)
+ else:
+ f(a, b, *args, **kwargs)
+
def assertIn(self, a, b, *args, **kwargs):
"""Python < v2.7 compatibility. Assert 'a' in 'b'"""
try:
|
more shims for python <I> (we'll get there, i promise)
|
py
|
diff --git a/semantic_release/__init__.py b/semantic_release/__init__.py
index <HASH>..<HASH> 100644
--- a/semantic_release/__init__.py
+++ b/semantic_release/__init__.py
@@ -1,6 +1,6 @@
"""Semantic Release
"""
-__version__ = "6.0.1"
+__version__ = "6.1.0"
from .errors import UnknownCommitMessageStyleError # noqa; noqa
|
<I> Automatically generated by python-semantic-release
|
py
|
diff --git a/blockstack/atlas.py b/blockstack/atlas.py
index <HASH>..<HASH> 100644
--- a/blockstack/atlas.py
+++ b/blockstack/atlas.py
@@ -70,7 +70,7 @@ PEER_CRAWL_NEIGHBOR_WORK_INTERVAL = 300 # minimum amount of time (seconds) t
PEER_HEALTH_NEIGHBOR_WORK_INTERVAL = 1 # minimum amount of time (seconds) that must pass between randomly pinging someone
PEER_CRAWL_ZONEFILE_WORK_INTERVAL = 300 # minimum amount of time (seconds) that must pass between two zonefile crawls
PEER_PUSH_ZONEFILE_WORK_INTERVAL = 300 # minimum amount of time (seconds) that must pass between two zonefile pushes
-PEER_CRAWL_ZONEFILE_STORAGE_RETRY_INTERVAL = 3600 * 12 # retry storage for missing zonefiles every 12 hours
+PEER_CRAWL_ZONEFILE_STORAGE_RETRY_INTERVAL = 3600 * 2 # retry storage for missing zonefiles every 2 hours
NUM_NEIGHBORS = 80 # number of neighbors a peer can report
|
retry storage every 2 hours, not <I>
|
py
|
diff --git a/troposphere/ecs.py b/troposphere/ecs.py
index <HASH>..<HASH> 100644
--- a/troposphere/ecs.py
+++ b/troposphere/ecs.py
@@ -102,7 +102,7 @@ class DeploymentCircuitBreaker(AWSProperty):
"""
props = {
"Enable": (boolean, True),
- "RollBack": (boolean, True)
+ "Rollback": (boolean, True)
}
|
Fix typo in ECS DeploymentCircuitBreaker RollBack => Rollback (Fixes #<I>)
|
py
|
diff --git a/src/cobra/sampling/optgp.py b/src/cobra/sampling/optgp.py
index <HASH>..<HASH> 100644
--- a/src/cobra/sampling/optgp.py
+++ b/src/cobra/sampling/optgp.py
@@ -214,12 +214,8 @@ class OptGPSampler(HRSampler):
# limit errors, something weird going on with multiprocessing
args = list(zip([n_process] * self.processes, range(self.processes)))
- # No with statement or starmap here since Python 2.x
- # does not support it :(
- mp = Pool(self.processes, initializer=mp_init, initargs=(self,))
- results = mp.map(_sample_chain, args, chunksize=1)
- mp.close()
- mp.join()
+ with Pool(self.processes, initializer=mp_init, initargs=(self,)) as pool:
+ results = pool.map(_sample_chain, args, chunksize=1)
chains = np.vstack([r[1] for r in results])
self.retries += sum(r[0] for r in results)
|
refactor: use context manager for multiprocessing.Pool in optgp.py
|
py
|
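The shape of the refactor, reduced to a runnable sketch: on exit the `with` block calls `pool.terminate()`, which is safe here because `map()` has already collected every result:

```python
from multiprocessing import Pool

def square(x):
    return x * x

if __name__ == '__main__':
    with Pool(4) as pool:
        results = pool.map(square, range(8), chunksize=1)
    print(results)  # [0, 1, 4, 9, 16, 25, 36, 49]
```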
diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py
index <HASH>..<HASH> 100644
--- a/satpy/tests/modifier_tests/test_parallax.py
+++ b/satpy/tests/modifier_tests/test_parallax.py
@@ -251,12 +251,10 @@ def xfail_selected_clearsky_combis(request):
This fixture helps to mark only those combinations as failing.
"""
# solution inspired by https://stackoverflow.com/q/64349115/974555
- ar_lat = request.getfixturevalue("ar_lat")
resolution = request.getfixturevalue("resolution")
resampler = request.getfixturevalue("resampler")
if (resampler.__name__ == "resample_bilinear" and
- ar_lat == 40 and
math.isclose(resolution, 0.01)):
request.node.add_marker(pytest.mark.xfail(
reason="parallax correction may fail with bilinear"))
|
Mark one more case as xfail. On a different architecture, such as on GitHub, another bilinear variant fails; mark it as xfail because bilinear fails anyway.
|
py
|
diff --git a/urbansim/developer/tests/test_developer.py b/urbansim/developer/tests/test_developer.py
index <HASH>..<HASH> 100644
--- a/urbansim/developer/tests/test_developer.py
+++ b/urbansim/developer/tests/test_developer.py
@@ -32,14 +32,15 @@ def test_developer(simple_dev_inputs):
current_units)
assert len(bldgs) == 1
- bldgs = dev.pick(["residential", "residential"], target_units, parcel_size, ave_unit_size,
- current_units)
- assert len(bldgs) == 1
+ # bldgs = dev.pick(["residential", "office"], target_units,
+ # parcel_size, ave_unit_size, current_units)
+ # assert len(bldgs) == 1
target_units = 1000
bldgs = dev.pick("residential", target_units, parcel_size, ave_unit_size,
current_units)
- assert len(bldgs) == 1
+ print bldgs
+ assert len(bldgs) == 2
target_units = 2
bldgs = dev.pick("residential", target_units, parcel_size, ave_unit_size,
|
the two tests have been "fixed": the second test was right this time and wrong before, and was changed because the default settings changed. The first test is difficult to fix, but it makes sense that pandas doesn't allow this anymore. I will take a look at it when I get a chance but should probably not spend too much time on it right now.
|
py
|
diff --git a/claripy/frontend.py b/claripy/frontend.py
index <HASH>..<HASH> 100644
--- a/claripy/frontend.py
+++ b/claripy/frontend.py
@@ -333,7 +333,7 @@ class Frontend(ana.Storable):
cached_n = 0
# if there's enough in the cache, return that
- if cached_n >= n or len(cached_results) < cached_n:
+ if cached_n >= n or len(cached_results) < cached_n or len(cached_results) >= n:
return tuple(sorted(cached_results))[:n]
# try to make sure we don't get more of the same
|
fixed a subtle cache issue that led to too many results being returned
|
py
|
diff --git a/test/test_mapper.py b/test/test_mapper.py
index <HASH>..<HASH> 100644
--- a/test/test_mapper.py
+++ b/test/test_mapper.py
@@ -126,8 +126,8 @@ class TestLens():
last_point = data[-1]
for tag, func in options:
lens = mapper.fit_transform(data, projection=tag, scaler=None)
- assert lens[0][0] == func(first_point)
- assert lens[-1][0] == func(last_point)
+ np.testing.assert_almost_equal(lens[0][0], func(first_point))
+ np.testing.assert_almost_equal(lens[-1][0], func(last_point))
def test_lens_size(self):
|
approximate equality: Python <I> gives a slightly different result
|
py
|
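The idiom being adopted: `np.testing.assert_almost_equal` compares to 7 decimal places by default, absorbing the tiny cross-version floating-point differences that break exact `==`:

```python
import numpy as np

value = 0.1 + 0.2        # 0.30000000000000004 on most platforms
# assert value == 0.3    # exact comparison can fail
np.testing.assert_almost_equal(value, 0.3)
```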
diff --git a/responses/test_responses.py b/responses/test_responses.py
index <HASH>..<HASH> 100644
--- a/responses/test_responses.py
+++ b/responses/test_responses.py
@@ -907,6 +907,18 @@ def test_response_filebody():
assert_reset()
+def test_use_stream_twice_to_double_raw_io():
+ @responses.activate
+ def run():
+ url = "http://example.com"
+ responses.add(responses.GET, url, body=b"42", stream=True)
+ resp = requests.get(url, stream=True)
+ assert resp.raw.read() == b"42"
+
+ run()
+ assert_reset()
+
+
def test_assert_all_requests_are_fired():
def request_callback(request):
raise BaseException()
|
tests for raw requests, push through #<I> (#<I>)
|
py
|
diff --git a/frojd_fabric/ext/npm.py b/frojd_fabric/ext/npm.py
index <HASH>..<HASH> 100644
--- a/frojd_fabric/ext/npm.py
+++ b/frojd_fabric/ext/npm.py
@@ -1,3 +1,10 @@
+# -*- coding: utf-8 -*-
+
+"""
+frojd_fabric.ext.npm
+-------------------------
+"""
+
from fabric.decorators import task
from fabric.state import env
|
Added file header declaration to npm
|
py
|
diff --git a/assembly/preprocess.py b/assembly/preprocess.py
index <HASH>..<HASH> 100644
--- a/assembly/preprocess.py
+++ b/assembly/preprocess.py
@@ -566,7 +566,7 @@ def correct(args):
p = OptionParser(correct.__doc__ + FastqNamings)
p.add_option("--dir", default="data",
help="Working directory [default: %default]")
- p.add_option("--nofragsdedup", default=False, action="store_true",
+ p.add_option("--fragsdedup", default=False, action="store_true",
help="Don't deduplicate the fragment reads [default: %default]")
p.add_option("--ploidy", default="2", choices=("1", "2"),
help="Ploidy [default: %default]")
@@ -603,8 +603,7 @@ def correct(args):
sh(cmd)
if op.exists(origfastb):
- dedup = not opts.nofragsdedup
- correct_frag(datadir, tag, origfastb, nthreads, dedup=dedup,
+ correct_frag(datadir, tag, origfastb, nthreads, dedup=opts.fragsdedup,
haploidify=haploidify)
origj = datadir + "/{0}_orig".format(tagj)
|
don't run dedup for fragments by default in assembly.preprocess.correct()
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -128,8 +128,8 @@ class PyTest(TestCommand):
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
- errno = pytest.main('treq_kerberos --flake8 ' +
- self.pytest_args)
+ args = 'treq_kerberos --flake8 ' + self.pytest_args
+ errno = pytest.main(args.split())
sys.exit(errno)
|
setup.py: split args to pytest.main The latest versions of pytest (<I>) require a list of strings here, not one single string.
|
py
|
diff --git a/xmantissa/signup.py b/xmantissa/signup.py
index <HASH>..<HASH> 100644
--- a/xmantissa/signup.py
+++ b/xmantissa/signup.py
@@ -58,9 +58,9 @@ class PasswordResetResource(Page):
if req.method == 'POST':
if 'username' in req.args:
- (user,) = req.args['username']
+ user = unicode(req.args['username'][0], 'ascii')
- att = self.original.newAttemptForUser(unicode(user))
+ att = self.original.newAttemptForUser(user)
if self.original.accountByAddress(user) is not None:
self._sendEmail(ctx, att)
else:
@@ -91,7 +91,7 @@ class PasswordResetResource(Page):
'to': attempt.username,
'date': rfc822.formatdate(),
'message-id': smtp.messageid(),
- 'link': 'http://%s:%s/%s/%s' % (host, port, self.prefixURL, attempt.key)}
+ 'link': 'http://%s:%s/%s/%s' % (host, port, self.original.prefixURL, attempt.key)}
_sendEmail('reset@' + host, attempt.username, body)
|
apply patch from #<I> + unicode(..., 'ascii'). fixes password reset. author: moe, reviewer: exarkun. closes #<I>.
|
py
|
diff --git a/src/python/setup.py b/src/python/setup.py
index <HASH>..<HASH> 100644
--- a/src/python/setup.py
+++ b/src/python/setup.py
@@ -173,7 +173,7 @@ if __name__ == '__main__':
package_data={
'turicreate': [
'_cython/*.so', '_cython/*.pyd',
- '*.so', '*.dylib',
+ '*.so', '*.dylib', 'toolkits/*.so',
# macOS visualization
'Turi Create Visualization.app/Contents/*',
|
Added .so to setup.py (#<I>)
|
py
|
diff --git a/pyDigitalWaveTools/vcd/common.py b/pyDigitalWaveTools/vcd/common.py
index <HASH>..<HASH> 100644
--- a/pyDigitalWaveTools/vcd/common.py
+++ b/pyDigitalWaveTools/vcd/common.py
@@ -47,7 +47,13 @@ class VcdVarScope():
buff = []
o = self
while True:
- buff.append(o.name)
+ try:
+ n = o.name
+ except AttributeError:
+ buff.append(repr(o))
+ break
+
+ buff.append(n)
o = o.parent
if o is None:
break
@@ -61,4 +67,3 @@ class VcdVarScope():
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self._getDebugName())
-
|
VcdVarScope: fix __repr__ for writer
|
py
|
diff --git a/pyvisa-py/tcpip.py b/pyvisa-py/tcpip.py
index <HASH>..<HASH> 100644
--- a/pyvisa-py/tcpip.py
+++ b/pyvisa-py/tcpip.py
@@ -100,7 +100,7 @@ class TCPIPInstrSession(Session):
flags = vxi11.OP_FLAG_TERMCHAR_SET
term_char = str(term_char).encode('utf-8')[0]
- read_data = b''
+ read_data = bytearray()
end_reason = vxi11.RX_END | vxi11.RX_CHR
@@ -115,7 +115,7 @@ class TCPIPInstrSession(Session):
if error:
return read_data, StatusCode.error_io
- read_data += data
+ read_data.extend(data)
count -= len(data)
if count <= 0:
@@ -124,7 +124,7 @@ class TCPIPInstrSession(Session):
chunk_length = min(count, chunk_length)
- return read_data, status
+ return bytes(read_data), status
def write(self, data):
"""Writes data to device or interface synchronously.
|
VXI<I> protocol performance enhancement
|
py
|
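The record above swaps bytes concatenation for a bytearray, a standard CPython performance idiom: `read_data += data` can rebuild the whole accumulated buffer on every iteration, while bytearray.extend grows in place and converts to bytes once at the end. A minimal sketch:

    chunks = [b'\x00' * 4096 for _ in range(1000)]

    slow = b''
    for c in chunks:
        slow += c            # may copy everything accumulated so far

    fast = bytearray()
    for c in chunks:
        fast.extend(c)       # amortized in-place growth
    assert bytes(fast) == slow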
diff --git a/categories/__init__.py b/categories/__init__.py
index <HASH>..<HASH> 100644
--- a/categories/__init__.py
+++ b/categories/__init__.py
@@ -1,18 +1,18 @@
__version_info__ = {
'major': 0,
'minor': 7,
- 'micro': 1,
- 'releaselevel': 'final',
+ 'micro': 2,
+ 'releaselevel': 'beta',
'serial': 1
}
-def get_version():
+def get_version(short=False):
+ assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
-
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
- if __version_info__['releaselevel'] != 'final':
- vers.append('%(releaselevel)s%(serial)i' % __version_info__)
+ if __version_info__['releaselevel'] != 'final' and not short:
+ vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
Updated the get_version function to be PEP <I> compliant and bumped the version to <I>b1
|
py
|
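To make the rewritten get_version concrete, here is a standalone re-implementation with example inputs (the dict values are illustrative, not taken from any release):

    def get_version(info, short=False):
        assert info['releaselevel'] in ('alpha', 'beta', 'final')
        vers = ['%(major)i.%(minor)i' % info]
        if info['micro']:
            vers.append('.%(micro)i' % info)
        if info['releaselevel'] != 'final' and not short:
            # PEP 440 style: first letter of the level plus the serial.
            vers.append('%s%i' % (info['releaselevel'][0], info['serial']))
        return ''.join(vers)

    info = {'major': 0, 'minor': 7, 'micro': 2,
            'releaselevel': 'beta', 'serial': 1}
    print(get_version(info))              # 0.7.2b1
    print(get_version(info, short=True))  # 0.7.2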
diff --git a/dvc/command/move.py b/dvc/command/move.py
index <HASH>..<HASH> 100644
--- a/dvc/command/move.py
+++ b/dvc/command/move.py
@@ -16,12 +16,15 @@ class CmdMove(CmdBase):
def add_parser(subparsers, parent_parser):
- MOVE_HELP = 'Move output of DVC file.'
+ description = "Rename or move a data file or a directory that "\
+ "is under DVC control. It renames and modifies "\
+ "the corresponding DVC file to reflect the changes."
+ help = "Rename or move a DVC controlled data file or a directory."
move_parser = subparsers.add_parser(
'move',
parents=[parent_parser],
- description=MOVE_HELP,
- help=MOVE_HELP)
- move_parser.add_argument('src', help='Source.')
- move_parser.add_argument('dst', help='Destination.')
+ description=description,
+ help=help)
+ move_parser.add_argument('src', help='Source path to a data file or directory.')
+ move_parser.add_argument('dst', help='Destination path.')
move_parser.set_defaults(func=CmdMove)
|
dvc move: improve help and description messages
|
py
|
diff --git a/phypno/ioeeg/edf.py b/phypno/ioeeg/edf.py
index <HASH>..<HASH> 100644
--- a/phypno/ioeeg/edf.py
+++ b/phypno/ioeeg/edf.py
@@ -172,7 +172,7 @@ class Edf:
endrec = int(floor(endsam / n_sam_rec[i_chan]))
endsam_rec = int(endsam % n_sam_rec[i_chan])
- dat = empty(shape=(endsam - begsam), dtype='int16')
+ dat = empty(shape=(int(endsam) - int(begsam)), dtype='int16')
i_dat = 0
with open(self.filename, 'rb') as f:
|
convert to int to avoid warnings about casting the wrong type
|
py
|
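The cast above sidesteps numpy's rejection of non-integral shapes, which commonly bites after floor() or division math produces floats. A minimal illustration (the sizes are arbitrary):

    import numpy as np

    begsam, endsam = 0.0, 512.0   # floats, e.g. after floor()/modulo math
    # np.empty(shape=endsam - begsam) warns or raises, depending on the
    # numpy version, because the shape is not integral.
    dat = np.empty(shape=(int(endsam) - int(begsam)), dtype='int16')
    print(dat.shape)              # (512,)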
diff --git a/satpy/tests/reader_tests/__init__.py b/satpy/tests/reader_tests/__init__.py
index <HASH>..<HASH> 100644
--- a/satpy/tests/reader_tests/__init__.py
+++ b/satpy/tests/reader_tests/__init__.py
@@ -40,7 +40,8 @@ from satpy.tests.reader_tests import (test_abi_l1b, test_agri_l1, test_hrit_base
test_fci_l1c_fdhsi, test_tropomi_l2,
test_hsaf_grib, test_abi_l2_nc, test_eum_base,
test_ami_l1b, test_viirs_compact, test_seviri_l2_bufr,
- test_geos_area, test_nwcsaf_msg, test_seviri_l1b_icare)
+ test_geos_area, test_nwcsaf_msg, test_seviri_l1b_icare,
+ test_glm_l2)
if sys.version_info < (2, 7):
import unittest2 as unittest
@@ -103,5 +104,6 @@ def suite():
mysuite.addTests(test_seviri_l2_bufr.suite())
mysuite.addTests(test_nwcsaf_msg.suite())
mysuite.addTests(test_seviri_l1b_icare.suite())
+ mysuite.addTests(test_glm_l2.suite())
return mysuite
|
Update seviri_l1b_icare branch to allow merging with master
|
py
|
diff --git a/pylint/pyreverse/writer.py b/pylint/pyreverse/writer.py
index <HASH>..<HASH> 100644
--- a/pylint/pyreverse/writer.py
+++ b/pylint/pyreverse/writer.py
@@ -35,9 +35,7 @@ class DiagramWriter:
basename = diagram.title.strip().replace(" ", "_")
file_name = f"{basename}.{self.config.output_format}"
if os.path.exists(self.config.output_directory):
- file_name = os.path.join(
- self.config.output_directory,
- file_name)
+ file_name = os.path.join(self.config.output_directory, file_name)
self.set_printer(file_name, basename)
if diagram.TYPE == "class":
self.write_classes(diagram)
|
pyreverse: Add output directory command-line option. Use the formatting of the `black` pre-commit hook. Issue: #<I>
|
py
|
diff --git a/views/holdingpen.py b/views/holdingpen.py
index <HASH>..<HASH> 100644
--- a/views/holdingpen.py
+++ b/views/holdingpen.py
@@ -16,6 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
+
"""
Holding Pen is a web interface overlay for all BibWorkflowObject's.
@@ -149,8 +150,9 @@ def maintable():
tags=json.dumps(tags_to_print))
+@blueprint.route('/<int:objectid>', methods=['GET', 'POST'])
@blueprint.route('/details/<int:objectid>', methods=['GET', 'POST'])
-@register_breadcrumb(blueprint, '.details', _("Record Details"))
+@register_breadcrumb(blueprint, '.details', _("Object Details"))
@login_required
@permission_required(viewholdingpen.name)
def details(objectid):
|
workflows: Holding Pen alternate endpoint. Adds a new alternative endpoint to the Holding Pen details pages.
|
py
|
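The stacked decorators in the record above are the usual Flask way to serve one view from two URLs. A self-contained sketch under that assumption (the blueprint name and prefix are made up):

    from flask import Blueprint, Flask

    blueprint = Blueprint('holdingpen', __name__, url_prefix='/holdingpen')

    @blueprint.route('/<int:objectid>', methods=['GET', 'POST'])
    @blueprint.route('/details/<int:objectid>', methods=['GET', 'POST'])
    def details(objectid):
        # Both /holdingpen/42 and /holdingpen/details/42 reach this view.
        return 'object %d' % objectid

    app = Flask(__name__)
    app.register_blueprint(blueprint)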
diff --git a/inplaceeditform_extra_fields/adaptors.py b/inplaceeditform_extra_fields/adaptors.py
index <HASH>..<HASH> 100644
--- a/inplaceeditform_extra_fields/adaptors.py
+++ b/inplaceeditform_extra_fields/adaptors.py
@@ -18,4 +18,5 @@
ADAPTOR_INPLACEEDIT = {'auto_fk': 'inplaceeditform_extra_fields.fields.AdaptorAutoCompleteForeingKeyField',
'auto_m2m': 'inplaceeditform_extra_fields.fields.AdaptorAutoCompleteManyToManyField',
'image_thumb': 'inplaceeditform_extra_fields.fields.AdaptorImageThumbnailField',
- 'tiny': 'inplaceeditform_extra_fields.fields.AdaptorTinyMCEField'}
+ 'tiny': 'inplaceeditform_extra_fields.fields.AdaptorTinyMCEField',
+ 'tiny_simple': 'inplaceeditform_extra_fields.fields.AdaptorSimpleTinyMCEField'}
|
Add the new adaptor to the adaptors file
|
py
|
diff --git a/example/overlap_example.py b/example/overlap_example.py
index <HASH>..<HASH> 100644
--- a/example/overlap_example.py
+++ b/example/overlap_example.py
@@ -1,7 +1,7 @@
# coding=utf-8
from example.commons import Collector, Faker
from pyecharts import options as opts
-from pyecharts.charts import Bar, Line, Page
+from pyecharts.charts import Bar, Line, Page, Scatter
C = Collector()
@@ -12,23 +12,23 @@ v3 = [2.0, 2.2, 3.3, 4.5, 6.3, 10.2, 20.3, 23.4, 23.0, 16.5, 12.0, 6.2]
@C.funcs
-def overlap_line_scatter() -> Bar:
+def overlap_line_scatter() -> Line:
x = Faker.choose()
- bar = (
- Bar()
+ line = (
+ Line()
.add_xaxis(x)
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
.set_global_opts(title_opts=opts.TitleOpts(title="Overlap-line+scatter"))
)
- line = (
- Line()
+ scatter = (
+ Scatter()
.add_xaxis(x)
.add_yaxis("商家A", Faker.values())
.add_yaxis("商家B", Faker.values())
)
- bar.overlap(line)
- return bar
+ line.overlap(scatter)
+ return line
@C.funcs
|
Update overlap_example.py (#<I>): fix the mismatch between the code and the description in the overlap example.
|
py
|
diff --git a/glue/pipeline.py b/glue/pipeline.py
index <HASH>..<HASH> 100644
--- a/glue/pipeline.py
+++ b/glue/pipeline.py
@@ -174,6 +174,16 @@ class CondorJob:
"""
self.__options[opt] = value
+ def get_opt( self, opt):
+ """
+ Returns the value associated with the given command line option.
+ Returns None if the option does not exist in the options list.
+ @param opt: command line option
+ """
+ if self.__options.has_key(opt):
+ return self.__options[opt]
+ return None
+
def add_file_opt(self, opt, file):
"""
Add a command line option to the executable. The order that the arguments
|
added function get_opt to CondorJob
|
py
|
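The get_opt helper above is written against Python 2 (dict.has_key). On Python 3 the same behaviour collapses into one dict.get call, which already returns None for missing keys. A sketch of a modern equivalent, trimmed to the relevant parts:

    class CondorJob:
        def __init__(self):
            self.__options = {}

        def add_opt(self, opt, value):
            self.__options[opt] = value

        def get_opt(self, opt):
            # dict.get returns None when the option was never added.
            return self.__options.get(opt)

    job = CondorJob()
    job.add_opt('log', 'job.log')
    print(job.get_opt('log'))      # job.log
    print(job.get_opt('missing'))  # None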
diff --git a/peep.py b/peep.py
index <HASH>..<HASH> 100755
--- a/peep.py
+++ b/peep.py
@@ -211,7 +211,7 @@ def version_of_download(filename, package_name):
# Handle github sha tarball downloads.
if is_git_sha(filename):
filename = package_name + '-' + filename
- if not filename.replace('_', '-').startswith(package_name):
+ if not filename.lower().replace('_', '-').startswith(package_name.lower()):
# TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?
give_up(filename, package_name)
return filename[len(package_name) + 1:] # Strip off '-' before version.
|
Package checks are now case-insensitive.
|
py
|
diff --git a/pypot/vrep/__init__.py b/pypot/vrep/__init__.py
index <HASH>..<HASH> 100644
--- a/pypot/vrep/__init__.py
+++ b/pypot/vrep/__init__.py
@@ -25,9 +25,16 @@ class vrep_time():
def __init__(self, vrep_io):
self.io = vrep_io
- def get_time(self):
+ def get_time(self, trial=0):
t = self.io.get_simulation_current_time()
+ if t == 0:
+ sys_time.sleep(.5)
+ return self.get_time(trial + 1)
+
+ if trial > 10:
+ raise EnvironmentError('Could not get current simulation time. Make sure the V-REP simulation is running. And that you have added the "time" child script to your scene.')
+
return t
def sleep(self, t):
|
Raise a clear error when the simulation time can't be retrieved from V-REP. Not really a fix, but at least a clearer exception is raised.
|
py
|
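As the message above concedes, the recursive retry is only a partial fix: the trial guard sits after the recursive call, so a simulation stuck at t == 0 recurses without bound. An iterative sketch of the same intent (the io interface is assumed from the record):

    import time

    def get_time(io, max_trials=10, delay=0.5):
        for _ in range(max_trials):
            t = io.get_simulation_current_time()
            if t != 0:
                return t
            time.sleep(delay)  # simulation not ready yet; wait and retry
        raise EnvironmentError(
            'Could not get current simulation time. Make sure the V-REP '
            'simulation is running and the "time" child script is in the scene.')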
diff --git a/angr/path.py b/angr/path.py
index <HASH>..<HASH> 100644
--- a/angr/path.py
+++ b/angr/path.py
@@ -242,8 +242,15 @@ class TreeIter(object):
raise IndexError(k)
def count(self, v):
+ """
+ Count occurrences of value v in the entire history. Note that the subclass must implement the __reversed__
+ method, otherwise an exception will be thrown.
+ :param object v: The value to look for
+ :return: The number of occurrences
+ :rtype: int
+ """
ctr = 0
- for item in self:
+ for item in reversed(self):
if item == v:
ctr += 1
return ctr
|
Fix my previous fix according to Andrew's suggestion
|
py
|
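The fix above leans on reversed(), which for a custom class only works if __reversed__ (or the sequence protocol, __len__ plus __getitem__) is defined, exactly the caveat the new docstring spells out. A minimal sketch of the contract:

    class History:
        def __init__(self, items):
            self._items = list(items)

        def __reversed__(self):
            # reversed(obj) calls this; without it, reversed() raises
            # TypeError on a non-sequence iterable.
            return reversed(self._items)

        def count(self, v):
            return sum(1 for item in reversed(self) if item == v)

    print(History(['a', 'b', 'a']).count('a'))  # 2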
diff --git a/kernel_tuner/cuda.py b/kernel_tuner/cuda.py
index <HASH>..<HASH> 100644
--- a/kernel_tuner/cuda.py
+++ b/kernel_tuner/cuda.py
@@ -11,7 +11,7 @@ try:
from pycuda.compiler import DynamicSourceModule
except ImportError:
drv = None
- SourceModule = None
+ DynamicSourceModule = None
class CudaFunctions(object):
|
fix test when pycuda is not installed
|
py
|
diff --git a/plenum/client/client.py b/plenum/client/client.py
index <HASH>..<HASH> 100644
--- a/plenum/client/client.py
+++ b/plenum/client/client.py
@@ -136,7 +136,7 @@ class Client(Motor,
if self.nodeReg:
logger.info("Client {} initialized with the following node registry:"
- .format(self.name))
+ .format(self.alias))
lengths = [max(x) for x in zip(*[
(len(name), len(host), len(str(port)))
for name, (host, port) in self.nodeReg.items()])]
@@ -146,7 +146,7 @@ class Client(Motor,
logger.info(fmt.format(name, host, port))
else:
logger.info(
- "Client {} found an empty node registry:".format(self.name))
+ "Client {} found an empty node registry:".format(self.alias))
Motor.__init__(self)
@@ -216,7 +216,7 @@ class Client(Motor,
oldstatus = self.status
if oldstatus in Status.going():
logger.info("{} is already {}, so start has no effect".
- format(self, self.status.name))
+ format(self.alias, self.status.name))
else:
super().start(loop)
self.nodestack.start()
|
changing logs to print alias
|
py
|
diff --git a/src/Python/somoclu/train.py b/src/Python/somoclu/train.py
index <HASH>..<HASH> 100644
--- a/src/Python/somoclu/train.py
+++ b/src/Python/somoclu/train.py
@@ -458,7 +458,7 @@ class Somoclu(object):
self._kernel_type)
def _pca_init(self):
- from sklearn.decomposition import RandomizedPCA
+ from sklearn.decomposition import PCA
coord = np.zeros((self._n_columns*self._n_rows, 2))
for i in range(self._n_columns*self._n_rows):
coord[i, 0] = int(i / self._n_columns)
@@ -467,7 +467,7 @@ class Somoclu(object):
coord = (coord - .5)*2
me = np.mean(self._data, 0)
self.codebook = np.tile(me, (self._n_columns*self._n_rows, 1))
- pca = RandomizedPCA(n_components=2)
+ pca = PCA(n_components=2, svd_solver="randomized")
pca.fit(self._data - me)
eigvec = pca.components_
eigval = pca.explained_variance_
|
Updated to new sk-learn PCA call signature
|
py
|
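For context on the record above: scikit-learn deprecated RandomizedPCA in 0.18 and later removed it; PCA with svd_solver="randomized" is the replacement. A before/after sketch on toy data:

    import numpy as np
    from sklearn.decomposition import PCA

    data = np.random.rand(100, 8)
    me = data.mean(axis=0)

    # Old, removed: RandomizedPCA(n_components=2)
    pca = PCA(n_components=2, svd_solver='randomized')
    pca.fit(data - me)
    print(pca.components_.shape)          # (2, 8)
    print(pca.explained_variance_.shape)  # (2,)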
diff --git a/pyxel/editor/image_editor.py b/pyxel/editor/image_editor.py
index <HASH>..<HASH> 100644
--- a/pyxel/editor/image_editor.py
+++ b/pyxel/editor/image_editor.py
@@ -38,7 +38,7 @@ class EditWindow(Widget):
def _draw_line(self, x1, y1, x2, y2, col):
if x1 == x2 and y1 == y2:
- if x1 >= 0 and x1 < 16 and y1 >= 0 and y1 <= 16:
+ if x1 >= 0 and x1 < 16 and y1 >= 0 and y1 < 16:
self._canvas[y1, x1] = col
return
|
Fixed the line function for the image editor
|
py
|
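The one-character fix above is a textbook off-by-one: a 16x16 canvas has valid indices 0..15 on both axes, so the bounds check needs a strict < throughout. A minimal sketch:

    import numpy as np

    canvas = np.zeros((16, 16), dtype=np.uint8)

    def put_pixel(x, y, col):
        # Exclusive upper bounds: index 16 would raise IndexError.
        if 0 <= x < 16 and 0 <= y < 16:
            canvas[y, x] = col

    put_pixel(15, 15, 7)  # corner pixel: written
    put_pixel(15, 16, 7)  # off canvas: silently skipped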
diff --git a/openupgradelib/openupgrade_merge_records.py b/openupgradelib/openupgrade_merge_records.py
index <HASH>..<HASH> 100644
--- a/openupgradelib/openupgrade_merge_records.py
+++ b/openupgradelib/openupgrade_merge_records.py
@@ -332,6 +332,7 @@ def _adjust_merged_values_orm(env, model_name, record_ids, target_record_id,
elif field.type in ('binary', 'many2one'):
op = op or 'merge'
if op == 'merge':
+ l = [x for x in l if x]
if not getattr(target_record, field.name) and l and not \
vals.get(field.name):
vals[field.name] = l[:1]
|
[FIX] merge_records: ensure non-empty values when merging many2one and binary types
|
py
|