diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
---|---|---|
diff --git a/pylint/checkers/imports.py b/pylint/checkers/imports.py
index <HASH>..<HASH> 100644
--- a/pylint/checkers/imports.py
+++ b/pylint/checkers/imports.py
@@ -337,18 +337,14 @@ given file (report RP0402 must not be disabled)'}
# check imports are grouped by category (standard, 3rd party, local)
std_imports, ext_imports, loc_imports = self._check_imports_order(node)
# check imports are grouped by package within a given category
- for imports in (std_imports, ext_imports, loc_imports):
- packages = []
- for imp in imports:
- if packages and packages[-1] == imp[1]:
- continue
- # check if an import from the same package has already been made
- for package in packages:
- if imp[1] == package:
- self.add_message('ungrouped-imports', node=imp[0],
- args=package)
- break
- packages.append(imp[1])
+ met = set()
+ curr_package = None
+ for imp in std_imports + ext_imports + loc_imports:
+ package, _, _ = imp[1].partition('.')
+ if curr_package and curr_package != package and package in met:
+ self.add_message('ungrouped-imports', node=imp[0], args=package)
+ curr_package = package
+ met.add(package)
self._imports_stack = []
self._first_non_import_node = None
|
Simplify a bit the algorithm checking for ungrouped imports related to issue #<I>
|
py
|
diff --git a/matplotlib2tikz.py b/matplotlib2tikz.py
index <HASH>..<HASH> 100644
--- a/matplotlib2tikz.py
+++ b/matplotlib2tikz.py
@@ -145,7 +145,8 @@ def save( filepath,
import codecs
file_handle = codecs.open(filepath, 'w', encoding)
- print(file_handle.encoding)
+ if show_info:
+ print('file encoding: {0}'.format(file_handle.encoding))
# gather the file content
data, content = _handle_children( data, mpl.pyplot.gcf() )
@@ -181,7 +182,8 @@ def save( filepath,
file_handle.close()
# print message about necessary pgfplot libs to command line
- _print_pgfplot_libs_message( data )
+ if show_info:
+ _print_pgfplot_libs_message( data )
return
# ==============================================================================
def _tex_comment( comment ):
|
Only print info when show_info==True The messages concerning encoding and libs are not printed anymore when show_info==False. This prevents pollution of output, e.g. when using tikz_save in an ipython notebook.
|
py
|
diff --git a/build.py b/build.py
index <HASH>..<HASH> 100755
--- a/build.py
+++ b/build.py
@@ -298,7 +298,10 @@ def build_check_requires_timestamp(t):
if m:
require_linenos[m.group(1)] = lineno
continue
- for lineno, line in _strip_comments(lines):
+ ignore_linenos = require_linenos.values()
+ for lineno, line in enumerate(lines):
+ if lineno in ignore_linenos:
+ continue
for require in require_linenos.iterkeys():
if require in line:
uses.add(require)
|
Fix check for unused goog.require directives
|
py
|
diff --git a/scigraph.py b/scigraph.py
index <HASH>..<HASH> 100755
--- a/scigraph.py
+++ b/scigraph.py
@@ -16,6 +16,7 @@ class restService:
self._session.mount('http://', adapter)
if cache:
+ print('WARNING: cache enabled, if you mutate the contents of return values you will mutate the cache!')
self._cache = dict()
self._get = self._cache_get
else:
@@ -353,6 +354,17 @@ class State:
def dolist(self, list_):
blocks = []
+ def sortkey(d):
+ if 'path' in d:
+ return d['path']
+ elif 'nickname' in d:
+ return d['nickname']
+ elif 'name' in d:
+ return d['name']
+ else:
+ return 0
+
+ list_.sort(key=sortkey)
for dict_ in list_:
code = self.dodict(dict_)
blocks.append(code)
|
scigraph added warning to cache and list sorting Added warning that the values returned from the cache are mutable. Added a sort key for lists in the codegen so that the ordering is consistent between versions.
|
py
|
diff --git a/torchtext/data/example.py b/torchtext/data/example.py
index <HASH>..<HASH> 100644
--- a/torchtext/data/example.py
+++ b/torchtext/data/example.py
@@ -28,14 +28,12 @@ class Example(object):
@classmethod
def fromTSV(cls, data, fields):
- if data[-1] == '\n':
- data = data[:-1]
+ data = data.rstrip("\n")
return cls.fromlist(data.split('\t'), fields)
@classmethod
def fromCSV(cls, data, fields):
- if data[-1] == '\n':
- data = data[:-1]
+ data = data.rstrip("\n")
# If Python 2, encode to utf-8 since CSV doesn't take unicode input
if six.PY2:
data = data.encode('utf-8')
|
Use rstrip instead of slicing when making examples from lines
|
py
|
diff --git a/src/AppiumLibrary/keywords/_applicationmanagement.py b/src/AppiumLibrary/keywords/_applicationmanagement.py
index <HASH>..<HASH> 100644
--- a/src/AppiumLibrary/keywords/_applicationmanagement.py
+++ b/src/AppiumLibrary/keywords/_applicationmanagement.py
@@ -2,6 +2,7 @@
import os
import robot
+import inspect
from appium import webdriver
from AppiumLibrary.utils import ApplicationCache
from .keywordgroup import KeywordGroup
@@ -136,13 +137,14 @@ class _ApplicationManagementKeywords(KeywordGroup):
The `loglevel` argument defines the used log level. Valid log levels are
`WARN`, `INFO` (default), `DEBUG`, `TRACE` and `NONE` (no logging).
"""
- ll = loglevel.upper()
- if ll == 'NONE':
- return ''
- else:
- source = self._current_application().page_source
- self._log(source, ll)
- return source
+ if "run_keyword_and_ignore_error" not in [check_error_ignored[3] for check_error_ignored in inspect.stack()]:
+ ll = loglevel.upper()
+ if ll == 'NONE':
+ return ''
+ else:
+ source = self._current_application().page_source
+ self._log(source, ll)
+ return source
def go_back(self):
"""Goes one step backward in the browser history."""
|
Check the stack to see if "run_keyword_and_ignore_error" is called; if called, do not log source (with the intention of preventing large log files)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,7 @@ suggestions and criticisms from the community are always very welcome.
Copyright (C) 2012-2013 GEM Foundation.
"""
import re
+import sys
from setuptools import setup, find_packages, Extension
import numpy
|
setup.py: Added missing import.
|
py
|
diff --git a/bulbs/instant_articles/parser.py b/bulbs/instant_articles/parser.py
index <HASH>..<HASH> 100644
--- a/bulbs/instant_articles/parser.py
+++ b/bulbs/instant_articles/parser.py
@@ -47,6 +47,26 @@ def parse_youtube(tag):
pass
+def parse_onion_video(tag):
+ # return {'onion_video': {'iframe': iframe}}
+ pass
+
+
+def parse_vimeo(tag):
+ # return {'vimeo': {'iframe': iframe}}
+ pass
+
+
+def parse_soundcloud(tag):
+ # return {'soundcloud': {'iframe': iframe}}
+ pass
+
+
+def parse_imgur(tag):
+ # return {'imgur': {'iframe': iframe}}
+ pass
+
+
PARSERS = [
# Sorted by precedence
parse_betty,
|
add parser stubs
|
py
|
diff --git a/fitsio/fitslib.py b/fitsio/fitslib.py
index <HASH>..<HASH> 100644
--- a/fitsio/fitslib.py
+++ b/fitsio/fitslib.py
@@ -3538,6 +3538,7 @@ class FITSHDR:
'ZQUANTIZ','ZDITHER0','ZIMAGE','ZCMPTYPE',
'ZSIMPLE','ZTENSION','ZPCOUNT','ZGCOUNT',
'ZBITPIX','ZEXTEND',
+ 'FZTILELN','FZALGOR',
'CHECKSUM','DATASUM']
self.delete(rmnames)
@@ -3574,7 +3575,7 @@ class FITSHDR:
nbase = ['TFORM','TTYPE','TDIM','TUNIT','TSCAL','TZERO',
'TNULL','TDISP','TDMIN','TDMAX','TDESC','TROTA',
- 'TRPIX','TRVAL','TDELT','TCUNI']
+ 'TRPIX','TRVAL','TDELT','TCUNI','FZALG']
for i in xrange(1,tfields+1):
names=['%s%d' % (n,i) for n in nbase]
self.delete(names)
|
Ignore more keywords when writing a header These are new keywords associated with compressed tables.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -37,7 +37,6 @@ setup(
'gcloud/storage/demo.key']},
include_package_data=True,
zip_safe=False,
- setup_requires=REQUIREMENTS,
install_requires=REQUIREMENTS,
classifiers=[
'Development Status :: 1 - Planning',
|
Remove install-time requirements from 'setup_requires'. That field is used for stuff needed at packaging time, but causes eggs to be downloaded and installed in the project directory, which is undesirable.
|
py
|
diff --git a/aigerbv/aigbv.py b/aigerbv/aigbv.py
index <HASH>..<HASH> 100644
--- a/aigerbv/aigbv.py
+++ b/aigerbv/aigbv.py
@@ -90,6 +90,9 @@ class AIGBV:
latch_map=self.latch_map | other.latch_map,
)
+ def __lshift__(self, other):
+ return other >> self
+
def __or__(self, other):
assert not self.outputs & other.outputs
assert not self.latches & other.latches
@@ -110,8 +113,8 @@ class AIGBV:
if shared_inputs:
for orig in shared_inputs:
new1, new2 = relabels1[orig], relabels2[orig]
- circ = common.tee(len(input_map[orig]), {orig: [new1, new2]}) \
- >> circ
+ circ <<= common.tee(len(input_map[orig]), {orig: [new1, new2]})
+
return circ
def __call__(self, inputs, latches=None):
|
define a << b == b >> a
|
py
|
diff --git a/anytemplate/engines/string.py b/anytemplate/engines/string.py
index <HASH>..<HASH> 100644
--- a/anytemplate/engines/string.py
+++ b/anytemplate/engines/string.py
@@ -16,7 +16,7 @@ import anytemplate.compat
LOGGER = logging.getLogger(__name__)
-class StringTemplateEngine(anytemplate.engines.base.BaseEngine):
+class StringTemplateEngine(anytemplate.engines.base.Engine):
_name = "string.Template"
_supported = True
|
follow the rename of base class
|
py
|
diff --git a/openquake/hazardlib/contexts.py b/openquake/hazardlib/contexts.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/contexts.py
+++ b/openquake/hazardlib/contexts.py
@@ -369,14 +369,14 @@ class PmapMaker():
L, G = len(self.imtls.array), len(self.gsims)
poemap = ProbabilityMap(L, G)
dists = []
- for rups, sites, maxdist in self._gen_rups_sites():
- if maxdist is not None:
- dists.append(maxdist)
+ for rups, sites, mdist in self._gen_rups_sites():
+ if mdist is not None:
+ dists.append(mdist)
for rup in rups:
try:
with self.ctx_mon:
r_sites, dctx = self.cmaker.make_contexts(
- sites, rup, maxdist)
+ sites, rup, mdist)
except FarAwayRupture:
continue
with self.gmf_mon:
|
Small renaming [skip CI]
|
py
|
diff --git a/libsubmit/providers/aws/aws.py b/libsubmit/providers/aws/aws.py
index <HASH>..<HASH> 100644
--- a/libsubmit/providers/aws/aws.py
+++ b/libsubmit/providers/aws/aws.py
@@ -183,16 +183,26 @@ class EC2Provider(ExecutionProvider):
logger.error("Site:[{0}] Failed to initialize".format(self))
raise e
+ state_file_exists = False
try:
self.statefile = self.config["execution"]["block"]["options"].get(
"stateFile", '.ec2site_{0}.json'.format(self.sitename)
)
self.read_state_file(self.statefile)
-
+ state_file_exists = True
except Exception as e:
- self.create_vpc().id
logger.info("No State File. Cannot load previous options. Creating new infrastructure")
- self.write_state_file()
+
+ if not state_file_exists:
+ try :
+ self.create_vpc().id
+ except Exception as e:
+ logger.info("Failed to create ec2 infrastructure : {0}".format(e))
+ raise
+ else:
+ # If infrastructure creation worked write state file
+ self.write_state_file()
+
@property
def channels_required(self):
|
Moving infra creation for #<I> If any failure happens during infra creation it is not properly reported. This adds better reporting. @reidmcy to test.
|
py
|
diff --git a/tests/common_serialization.py b/tests/common_serialization.py
index <HASH>..<HASH> 100644
--- a/tests/common_serialization.py
+++ b/tests/common_serialization.py
@@ -142,14 +142,12 @@ class TestTypeSerializationDummy(object):
class MetaTestTypeSerializationDummy(type):
pass
-# TODO: figure out why this works:
-if PY3:
- class TestTypeSerializationDummyWithMeta(
- with_metaclass(MetaTestTypeSerializationDummy, object)):
- pass
-else:
- class TestTypeSerializationDummyWithMeta(object):
- __metaclass__ = MetaTestTypeSerializationDummy
+
+class TestTypeSerializationDummyWithMeta(
+ with_metaclass(MetaTestTypeSerializationDummy, object)):
+ __metaclass_save_equal_hack__ = MetaTestTypeSerializationDummy
+ # our `safe_equal` use `__dict__` and `__metaclass__` is in
+ # this dict so we need some other __other_field__ to ident it
class ConverterTest(common.TestCase):
|
fix error with class equality in tests: all tests pass
|
py
|
diff --git a/progressbar/__about__.py b/progressbar/__about__.py
index <HASH>..<HASH> 100644
--- a/progressbar/__about__.py
+++ b/progressbar/__about__.py
@@ -19,7 +19,7 @@ A Python Progressbar library to provide visual (yet text based) progress to
long running operations.
'''.strip().split())
__email__ = '[email protected]'
-__version__ = '3.42.0'
+__version__ = '3.42.1'
__license__ = 'BSD'
__copyright__ = 'Copyright 2015 Rick van Hattem (Wolph)'
__url__ = 'https://github.com/WoLpH/python-progressbar'
|
Incrementing version to <I>
|
py
|
diff --git a/util/preprocess.py b/util/preprocess.py
index <HASH>..<HASH> 100644
--- a/util/preprocess.py
+++ b/util/preprocess.py
@@ -8,8 +8,8 @@ from multiprocessing.dummy import Pool
from util.audio import audiofile_to_input_vector
from util.text import text_to_char_array
-def pmap(fun, iterable, threads=8):
- pool = Pool(threads)
+def pmap(fun, iterable):
+ pool = Pool()
results = pool.map(fun, iterable)
pool.close()
return results
|
Preprocessing: use all available threads ...the limitation to 8 threads looks a bit random to me.
|
py
|
diff --git a/dynamic_rest/prefetch.py b/dynamic_rest/prefetch.py
index <HASH>..<HASH> 100644
--- a/dynamic_rest/prefetch.py
+++ b/dynamic_rest/prefetch.py
@@ -227,7 +227,8 @@ class FastQueryCompatMixin(object):
prefetches = []
for field, fprefetch in self.prefetches.items():
- qs = fprefetch.query.queryset if fprefetch.query else None
+ has_query = hasattr(fprefetch, 'query')
+ qs = fprefetch.query.queryset if has_query else None
prefetches.append(
Prefetch(field, queryset=qs)
)
|
prevents execution of base query through accessing query attribute on prefetch object
|
py
|
diff --git a/angr/analyses/cfg/cfg_fast.py b/angr/analyses/cfg/cfg_fast.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/cfg/cfg_fast.py
+++ b/angr/analyses/cfg/cfg_fast.py
@@ -2367,6 +2367,8 @@ class CFGFast(ForwardAnalysis, CFGBase): # pylint: disable=abstract-method
# is it in a section with zero bytes, like .bss?
obj = self.project.loader.find_object_containing(data_addr)
+ if obj is None:
+ return None, None
section = obj.find_section_containing(data_addr)
if section is not None and section.only_contains_uninitialized_data:
# Nothing much you can do
|
CFGFast: Handle cases where find_object_containing() returns None.
|
py
|
diff --git a/src/pyechonest/artist.py b/src/pyechonest/artist.py
index <HASH>..<HASH> 100644
--- a/src/pyechonest/artist.py
+++ b/src/pyechonest/artist.py
@@ -107,7 +107,9 @@ class Artist(object):
['audio', 'urls', 'images', 'biographies', 'blogs',
'familiarity', 'hotttnesss', 'news', 'reviews', 'video']
@param refresh=False : refresh the cache"""
- if self._profile is None or not CACHE or refresh:
+ make_the_call = (self._profile is None) or (not CACHE) or refresh or \
+ any([x for x in buckets if not x in self._profile.keys()])
+ if make_the_call:
response = util.call('get_profile', {'id':self.identifier}, buckets=buckets)
result = response.find('artist')
self._profile = dictify(result)
|
automatically refreshes if you ask for different buckets
|
py
|
diff --git a/specter/expect.py b/specter/expect.py
index <HASH>..<HASH> 100644
--- a/specter/expect.py
+++ b/specter/expect.py
@@ -118,9 +118,16 @@ class ExpectAssert(object):
if not self.success:
was = 'wasn\'t' if self.used_negative else 'was'
- msg = _('Function {func_name} {was} expected to raise "{excpt}"'
+
+ # Make sure we have a name to use
+ if hasattr(self.expected, '__name__'):
+ name = self.expected.__name__
+ else:
+ name = type(self.expected).__name__
+
+ msg = _('function {func_name} {was} expected to raise "{excpt}"'
''.format(func_name=self.target_src_param,
- excpt=self.expected.__name__,
+ excpt=name,
was=was))
self.custom_msg = msg
|
Adding error handling for missing __name__ Fixes the edge case where an exception used doesn't contain a double underscore name definition. If this case is hit, Specter should grab the name from the type. Addresses: #<I>
|
py
|
diff --git a/bin/run_tests.py b/bin/run_tests.py
index <HASH>..<HASH> 100755
--- a/bin/run_tests.py
+++ b/bin/run_tests.py
@@ -175,8 +175,9 @@ def main(parser, parse_args):
# Run tests
- pytest.main(args + list(tests))
+ return pytest.main(args + list(tests))
if __name__ == "__main__":
- main(parser, sys.argv[1:])
+ result = main(parser, sys.argv[1:])
+ sys.exit(result)
|
Return proper exit code from pytest.main()
|
py
|
diff --git a/django_webpack/bundle.py b/django_webpack/bundle.py
index <HASH>..<HASH> 100644
--- a/django_webpack/bundle.py
+++ b/django_webpack/bundle.py
@@ -1,3 +1,4 @@
+import sys
import os
from django.utils.safestring import mark_safe
from django.contrib.staticfiles import finders
@@ -7,7 +8,7 @@ if six.PY2:
elif six.PY3:
from urllib.parse import urljoin
from .services import WebpackService
-from .exceptions import ConfigNotFound
+from .exceptions import ConfigNotFound, BundlingError
from .settings import BUNDLE_ROOT, BUNDLE_URL, CACHE
webpack_service = WebpackService()
@@ -61,7 +62,12 @@ class WebpackBundle(object):
Returns HTML script elements pointing to the assets generated by webpack
"""
scripts = []
- for url in self.get_urls():
+ # Ensure that all exceptions raise flags, even the ones that django will silently suppress
+ try:
+ urls = self.get_urls()
+ except (TypeError, AttributeError) as e:
+ six.reraise(BundlingError, BundlingError(*e.args), sys.exc_info()[2])
+ for url in urls:
scripts.append(
'<script src="{url}"></script>'.format(url=url)
)
|
Ensuring that django will not silently fail when rendering a bundle.
|
py
|
diff --git a/tornado/test/web_test.py b/tornado/test/web_test.py
index <HASH>..<HASH> 100644
--- a/tornado/test/web_test.py
+++ b/tornado/test/web_test.py
@@ -2393,6 +2393,7 @@ class FinishExceptionTest(SimpleHandlerTestCase):
self.assertEqual(b'authentication required', response.body)
+@wsgi_safe
class DecoratorTest(WebTestCase):
def get_handlers(self):
class RemoveSlashHandler(RequestHandler):
@@ -2428,6 +2429,7 @@ class DecoratorTest(WebTestCase):
self.assertEqual(response.headers['Location'], "/addslash/?foo=bar")
+@wsgi_safe
class CacheTest(WebTestCase):
def get_handlers(self):
class EtagHandler(RequestHandler):
|
Add @wsgi_safe decorator to a couple of web tests.
|
py
|
diff --git a/subliminal/subtitle.py b/subliminal/subtitle.py
index <HASH>..<HASH> 100644
--- a/subliminal/subtitle.py
+++ b/subliminal/subtitle.py
@@ -105,8 +105,9 @@ def is_valid_subtitle(subtitle_text):
try:
pysrt.from_string(subtitle_text, error_handling=pysrt.ERROR_RAISE)
return True
- except pysrt.Error:
- pass
+ except pysrt.Error as e:
+ if e.args[0] > 80:
+ return True
except:
logger.exception('Unexpected error when validating subtitle')
return False
|
Be more permissive in subtitle validation
|
py
|
diff --git a/src/_pytest/fixtures.py b/src/_pytest/fixtures.py
index <HASH>..<HASH> 100644
--- a/src/_pytest/fixtures.py
+++ b/src/_pytest/fixtures.py
@@ -93,7 +93,7 @@ def get_scope_package(node, fixturedef):
cls = pytest.Package
current = node
- fixture_package_name = os.path.join(fixturedef.baseid, "__init__.py")
+ fixture_package_name = "%s/%s" % (fixturedef.baseid, "__init__.py")
while current and (
type(current) is not cls or fixture_package_name != current.nodeid
):
|
Fix the package fixture ordering in Windows.
|
py
|
diff --git a/api/symboltable.py b/api/symboltable.py
index <HASH>..<HASH> 100644
--- a/api/symboltable.py
+++ b/api/symboltable.py
@@ -649,8 +649,6 @@ class SymbolTable(object):
if entry is None:
return
entry.declared = True
- entry.scope = SCOPE.parameter
-
if entry.type_ == self.basic_types[TYPE.string] and entry.t[0] != '$':
entry.t = '$' + entry.t # FIXME: This must be worked out
|
Removed useless line. SCOPE is set in the PARAMDECL class.
|
py
|
diff --git a/stanza/server/client.py b/stanza/server/client.py
index <HASH>..<HASH> 100644
--- a/stanza/server/client.py
+++ b/stanza/server/client.py
@@ -113,6 +113,7 @@ class RobustService(object):
self.be_quiet = be_quiet
self.host = host
self.port = port
+ atexit.register(self.atexit_kill)
def is_alive(self):
try:
@@ -140,6 +141,13 @@ class RobustService(object):
stderr=stderr,
stdout=stderr)
+ def atexit_kill(self):
+ # make some kind of effort to stop the service (such as a
+ # CoreNLP server) at the end of the program. not waiting so
+ # that the python script exiting isn't delayed
+ if self.server and self.server.poll() is None:
+ self.server.terminate()
+
def stop(self):
if self.server:
self.server.terminate()
|
Try to terminate a server at exit even if it wasn't made as a context
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@
from setuptools import find_packages
from setuptools import setup
-with open("README.md", "r") as readme:
+with open("README.md", "r", encoding="utf-8") as readme:
long_description = readme.read()
setup(
|
Force utf-8 when reading readme
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,12 +9,18 @@ with open(
) as f:
VERSION = re.match(r".*__version__ = '(.*?)'", f.read(), re.S).group(1)
+with open(
+ os.path.join(os.path.dirname(__file__), 'README.rst')
+) as f:
+ long_description = f.read()
setup(
name='grpclib',
version=VERSION,
description='Pure-Python gRPC implementation, based on asyncio and '
'hyper-h2',
+ long_description=long_description,
+ long_description_content_type='text/x-rst',
author='Vladimir Magamedov',
author_email='[email protected]',
url='https://github.com/vmagamedov/grpclib',
|
Add long description to setup.py to show in PyPI
|
py
|
diff --git a/salt/modules/win_system.py b/salt/modules/win_system.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_system.py
+++ b/salt/modules/win_system.py
@@ -31,6 +31,7 @@ except ImportError:
# Import salt libs
import salt.utils
import salt.utils.locales
+import salt.ext.six as six
# Set up logging
log = logging.getLogger(__name__)
@@ -354,7 +355,7 @@ def set_computer_name(name):
salt 'minion-id' system.set_computer_name 'DavesComputer'
'''
- if name and isinstance(name, str):
+ if name and not six.PY3:
name = name.decode('utf-8')
if windll.kernel32.SetComputerNameExW(win32con.ComputerNamePhysicalDnsHostname,
|
don't decode strings on Python 3
|
py
|
diff --git a/peglet.py b/peglet.py
index <HASH>..<HASH> 100644
--- a/peglet.py
+++ b/peglet.py
@@ -85,7 +85,7 @@ def _parse(rules, actions, rule, text):
vals = ()
for token in tokens:
ok, pos, vals = parse_token(token, pos, vals)
- if not ok: return False, pos, None
+ if not ok: return False, pos, vals
return True, pos, vals
def parse_token(token, pos, vals):
|
code a bit clearer, I think
|
py
|
diff --git a/tests/docker/test_docker_run.py b/tests/docker/test_docker_run.py
index <HASH>..<HASH> 100644
--- a/tests/docker/test_docker_run.py
+++ b/tests/docker/test_docker_run.py
@@ -147,7 +147,7 @@ describe HarpoonCase, "Building docker images":
if isinstance(output, six.binary_type):
output = output.decode('utf-8')
- output = [line for line in output.split('\n') if "lxc-start" not in line]
+ output = [line.strip() for line in output.split('\n') if "lxc-start" not in line]
self.assertEqual(output[-2:], ["hi1", "there2"])
|
Make the test pass on travis
|
py
|
diff --git a/hwt/hdl/types/structCast.py b/hwt/hdl/types/structCast.py
index <HASH>..<HASH> 100644
--- a/hwt/hdl/types/structCast.py
+++ b/hwt/hdl/types/structCast.py
@@ -1,12 +1,13 @@
from hwt.code import Concat
from hwt.doc_markers import internal
from hwt.hdl.typeShortcuts import vec
+from hwt.hdl.types.array import HArray
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.hdlType import default_reinterpret_cast_fn, HdlType
+from hwt.hdl.types.struct import HStruct
from hwt.hdl.value import HValue
+from hwt.interfaces.std import Signal
from hwt.synthesizer.rtlLevel.mainBases import RtlSignalBase
-from hwt.hdl.types.array import HArray
-from hwt.hdl.types.struct import HStruct
@internal
@@ -19,6 +20,8 @@ def hstruct_reinterpret_to_bits(self, sigOrVal, toType: HdlType):
part = vec(None, width)
else:
part = getattr(sigOrVal, f.name)
+ if isinstance(part, Signal):
+ part = part._sig
if not isinstance(part, (HValue, RtlSignalBase)):
part = f.dtype.from_py(part)
|
hstruct_reinterpret_to_bits: support for Signal instances
|
py
|
diff --git a/pyphi/subsystem.py b/pyphi/subsystem.py
index <HASH>..<HASH> 100644
--- a/pyphi/subsystem.py
+++ b/pyphi/subsystem.py
@@ -865,9 +865,9 @@ def cause_emd(d1, d2):
If the distributions are independent we can use the same shortcut we use
for effect repertoires. Otherwise fall back to the Hamming EMD.
"""
- # TODO: only check independence for large repertoires
+ # TODO: benchmark with real repertoires and find the best cutoff
# TODO: do we need to check both distributions? or just one?
- if utils.independent(d1) and utils.independent(d2):
+ if d1.ndim > 7 and utils.independent(d1) and utils.independent(d2):
return effect_emd(d1, d2)
return utils.hamming_emd(d1, d2)
|
Only check independence for n > 7
|
py
|
diff --git a/AlphaTwirl/EventReader/MPEventLoopRunner.py b/AlphaTwirl/EventReader/MPEventLoopRunner.py
index <HASH>..<HASH> 100755
--- a/AlphaTwirl/EventReader/MPEventLoopRunner.py
+++ b/AlphaTwirl/EventReader/MPEventLoopRunner.py
@@ -58,9 +58,7 @@ class MPEventLoopRunner(object):
self._progressMonitor.last()
- for i in xrange(self._nworkers):
- self._tasks.put(None) # end workers
- self._tasks.join()
+ self.end_workers()
def collectTaskResults(self):
if self._results.empty(): return False
@@ -68,4 +66,9 @@ class MPEventLoopRunner(object):
self._allReaders[reader.id].setResults(reader.results())
return True
+ def end_workers(self):
+ for i in xrange(self._nworkers):
+ self._tasks.put(None) # end workers
+ self._tasks.join()
+
##____________________________________________________________________________||
|
extract a method end_workers() in MPEventLoopRunner
|
py
|
diff --git a/holoviews/plotting/plot.py b/holoviews/plotting/plot.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/plot.py
+++ b/holoviews/plotting/plot.py
@@ -580,7 +580,10 @@ class GenericElementPlot(DimensionedPlot):
else:
y0, y1 = (np.NaN, np.NaN)
if self.projection == '3d':
- z0, z1 = ranges[dims[2].name]
+ if len(dims) > 2:
+ z0, z1 = ranges[dims[2].name]
+ else:
+ z0, z1 = np.NaN, np.NaN
else:
x0, x1 = view.range(0)
y0, y1 = view.range(1) if ndims > 1 else (np.NaN, np.NaN)
|
Small fix for extents when using 3d projection
|
py
|
diff --git a/spyderplugins/widgets/pylintgui.py b/spyderplugins/widgets/pylintgui.py
index <HASH>..<HASH> 100644
--- a/spyderplugins/widgets/pylintgui.py
+++ b/spyderplugins/widgets/pylintgui.py
@@ -352,7 +352,7 @@ class PylintWidget(QWidget):
line_nb = line[i1+1:i2].strip()
if not line_nb:
continue
- line_nb = int(line_nb)
+ line_nb = int(line_nb.split(',')[0])
message = line[i2+1:]
item = (module, line_nb, message, msg_id)
results[line[0]+':'].append(item)
|
(Fixes Issue <I>) Pylint/bugfix: fixed parsing error
|
py
|
diff --git a/rollbar/contrib/starlette/requests.py b/rollbar/contrib/starlette/requests.py
index <HASH>..<HASH> 100644
--- a/rollbar/contrib/starlette/requests.py
+++ b/rollbar/contrib/starlette/requests.py
@@ -46,13 +46,7 @@ def get_current_request() -> Optional[Request]:
)
return None
- request = _current_request.get()
-
- if request is None:
- log.error('Request is not available in the present context.')
- return None
-
- return request
+ return _current_request.get()
def store_current_request(
|
Do not log error if request is missing in the context Return None object instead
|
py
|
diff --git a/openquake/db/models.py b/openquake/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/db/models.py
+++ b/openquake/db/models.py
@@ -1756,6 +1756,32 @@ class Gmf(djm.Model):
class DisaggResult(djm.Model):
+ """
+ Storage for disaggregation historgrams. Each histogram is stored in
+ `matrix` as a 6-dimensional numpy array (pickled). The dimensions of the
+ matrix are as follows, in order:
+
+ * magnitude
+ * distance
+ * longitude
+ * latitude
+ * epsilon
+ * tectonic region type
+
+ Bin edges are defined for all of these dimensions (except tectonic region
+ type) as:
+
+ * `mag_bin_edges`
+ * `dist_bin_edges`
+ * `lat_bin_edges`
+ * `lon_bin_edges`
+ * `eps_bin_edges`
+
+ Additional metadata for the disaggregation histogram is stored, including
+ location (POINT geometry), disaggregation PoE (Probability of Exceedance)
+ and the corresponding IML (Intensity Measure Level) extracted from the
+ hazard curve, logic tree path information, and investigation time.
+ """
output = djm.ForeignKey('Output')
lt_realization = djm.ForeignKey('LtRealization')
|
db/models: Added some doc for `DisaggResult` table model.
|
py
|
diff --git a/stop_words/__init__.py b/stop_words/__init__.py
index <HASH>..<HASH> 100644
--- a/stop_words/__init__.py
+++ b/stop_words/__init__.py
@@ -1,7 +1,7 @@
import json
import os
-__VERSION__ = (2015, 2, 21)
+__VERSION__ = (2015, 2, 23)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
STOP_WORDS_DIR = os.path.join(CURRENT_DIR, 'stop-words')
STOP_WORDS_CACHE = {}
|
__VERSION__ changed
|
py
|
diff --git a/algorithms/tree/red_black_tree/red_black_tree.py b/algorithms/tree/red_black_tree/red_black_tree.py
index <HASH>..<HASH> 100644
--- a/algorithms/tree/red_black_tree/red_black_tree.py
+++ b/algorithms/tree/red_black_tree/red_black_tree.py
@@ -241,8 +241,7 @@ class RBTree:
node.parent.color = 0
node_brother.right.color = 0
self.left_rotate(node.parent)
- node = self.root
- break
+ node = self.root
else:
node_brother = node.parent.left
if node_brother.color == 1:
@@ -264,8 +263,7 @@ class RBTree:
node.parent.color = 0
node_brother.left.color = 0
self.right_rotate(node.parent)
- node = self.root
- break
+ node = self.root
node.color = 0
def inorder(self):
|
fixing delete_fixup() in red_black_tree (#<I>)
|
py
|
diff --git a/tests/test_hooks.py b/tests/test_hooks.py
index <HASH>..<HASH> 100644
--- a/tests/test_hooks.py
+++ b/tests/test_hooks.py
@@ -1,10 +1,11 @@
+import os
from io import BytesIO
from unittest.mock import MagicMock, patch
from isort import hooks
-def test_git_hook():
+def test_git_hook(src_dir):
"""Simple smoke level testing of git hooks"""
# Ensure correct subprocess command is called
@@ -15,11 +16,13 @@ def test_git_hook():
)
# Test with incorrectly sorted file returned from git
- with patch("isort.hooks.get_lines", MagicMock(return_value=["isort/isort.py"])) as run_mock:
+ with patch(
+ "isort.hooks.get_lines", MagicMock(return_value=[os.path.join(src_dir, "main.py")])
+ ) as run_mock:
- class FakeProecssResponse(object):
+ class FakeProcessResponse(object):
stdout = b"import b\nimport a"
- with patch("subprocess.run", MagicMock(return_value=FakeProecssResponse())) as run_mock:
- with patch("isort.hooks.api", MagicMock()):
+ with patch("subprocess.run", MagicMock(return_value=FakeProcessResponse())) as run_mock:
+ with patch("isort.api", MagicMock(return_value=False)):
hooks.git_hook(modify=True)
|
<I>% hooks coverage
|
py
|
diff --git a/billy/bin/update.py b/billy/bin/update.py
index <HASH>..<HASH> 100755
--- a/billy/bin/update.py
+++ b/billy/bin/update.py
@@ -107,14 +107,7 @@ def _run_scraper(scraper_type, options, metadata):
# run scraper against year/session/term
for time in times:
for chamber in options.chambers:
- try:
- scraper.scrape(chamber, time)
- except Exception as e: #We're re-raising.
- scrape['end_time'] = dt.datetime.utcnow()
- scrape['exception'] = e
- runs.append(scrape)
- e._billy_scrape_runlog = runs
- raise
+ scraper.scrape(chamber, time)
if scraper_type == 'events' and len(options.chambers) == 2:
scraper.scrape('other', time)
@@ -373,8 +366,7 @@ def main(old_scrape_compat=False):
if args.bills:
run_record += _run_scraper('bills', args, metadata)
except Exception as e :
- print e
- run_record += e._billy_scrape_runlog
+ run_record += [{ "exception" : e }]
lex = e
exec_end = dt.datetime.utcnow()
|
changing update up a skitch
|
py
|
diff --git a/test_path.py b/test_path.py
index <HASH>..<HASH> 100644
--- a/test_path.py
+++ b/test_path.py
@@ -1221,7 +1221,3 @@ class TestMultiPath:
assert not isinstance(first, Multi)
assert next(items) == '/baz/bing'
assert path == input
-
-
-if __name__ == '__main__':
- pytest.main()
|
Remove executable function of test_path. Tester is always expected to invoke pytest directly.
|
py
|
diff --git a/tests/conftest.py b/tests/conftest.py
index <HASH>..<HASH> 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -64,8 +64,8 @@ def pytest_configure(config):
else:
# we're not running integration tests
print("running without integration tests")
- # if we're on Travis, this is an error
- if os.getenv('TRAVIS_PYTHON_VERSION'):
+ # if we're on GitHub CI, this is an error
+ if os.getenv('CI'):
sys.exit(1)
# display the working directory and the OpenSSL/GPG/pgpdump versions
|
Error if we are running on GitHub CI without integration tests
|
py
|
diff --git a/tests/test_rs.py b/tests/test_rs.py
index <HASH>..<HASH> 100644
--- a/tests/test_rs.py
+++ b/tests/test_rs.py
@@ -315,7 +315,8 @@ class ReplicaSetTestCase(unittest.TestCase):
# self.repl = ReplicaSet(self.repl_cfg)
def tearDown(self):
- self.repl.cleanup()
+ if hasattr(self, 'repl'):
+ self.repl.cleanup()
if os.path.exists(self.db_path):
os.remove(self.db_path)
|
fix issue with ReplicaSetTestCase hasn't attribute repl
|
py
|
diff --git a/tmdbsimple/movies.py b/tmdbsimple/movies.py
index <HASH>..<HASH> 100644
--- a/tmdbsimple/movies.py
+++ b/tmdbsimple/movies.py
@@ -102,7 +102,7 @@ class Movies(TMDB):
def external_ids(self, **kwargs):
"""
- Get the external ids for a specific person id.
+ Get the external ids for a specific movie id.
Args:
language: (optional) ISO 639-1 code.
|
Change movies.py external_ids comment from "specific person id" to "specific movie id"
|
py
|
diff --git a/pyvista/plotting/widgets.py b/pyvista/plotting/widgets.py
index <HASH>..<HASH> 100644
--- a/pyvista/plotting/widgets.py
+++ b/pyvista/plotting/widgets.py
@@ -1161,7 +1161,7 @@ class WidgetHelper:
theta_resolution: int , optional
Set the number of points in the longitude direction (ranging from
- start_theta to end theta).
+ start_theta to end_theta).
phi_resolution : int, optional
Set the number of points in the latitude direction (ranging from
|
typo fix (#<I>)
|
py
|
diff --git a/src/moneyed/localization.py b/src/moneyed/localization.py
index <HASH>..<HASH> 100644
--- a/src/moneyed/localization.py
+++ b/src/moneyed/localization.py
@@ -303,7 +303,7 @@ _sign(DEFAULT, moneyed.SZL, prefix='E')
_sign(DEFAULT, moneyed.THB, prefix='฿')
_sign(DEFAULT, moneyed.TND, prefix='د.ت')
_sign(DEFAULT, moneyed.TOP, prefix='TOP$')
-_sign(DEFAULT, moneyed.TRY, prefix='TL')
+_sign(DEFAULT, moneyed.TRY, prefix='₺')
_sign(DEFAULT, moneyed.TTD, prefix='TT$')
_sign(DEFAULT, moneyed.TVD, prefix='$T')
_sign(DEFAULT, moneyed.TWD, prefix='NT$')
|
Turkish money symbol As reported in django-money#<I>
|
py
|
diff --git a/autofit/graphical/factor_graphs/numerical.py b/autofit/graphical/factor_graphs/numerical.py
index <HASH>..<HASH> 100644
--- a/autofit/graphical/factor_graphs/numerical.py
+++ b/autofit/graphical/factor_graphs/numerical.py
@@ -66,7 +66,7 @@ def numerical_func_jacobian(
for det, val in det_vars0.items()
}
det_slices = {
- v: (slice(None),) * np.ndim(a) for v, a in values.items()}
+ v: (slice(None),) * np.ndim(a) for v, a in det_vars0.items()}
for v, grad in fjac.items():
x0 = p0[v]
@@ -85,7 +85,7 @@ def numerical_func_jacobian(
if _calc_deterministic:
det_vars = f.deterministic_values
for det, val in det_vars.items():
- v_jac[det][det_slices[v] + ind] = \
+ v_jac[det][det_slices[det] + ind] = \
(val - det_vars0[det]) / _eps
else:
p0[v] += _eps
|
bugfixing calculation of deterministic Jacobians
|
py
|
diff --git a/src/ossos-pipeline/scripts/update_astrometry.py b/src/ossos-pipeline/scripts/update_astrometry.py
index <HASH>..<HASH> 100644
--- a/src/ossos-pipeline/scripts/update_astrometry.py
+++ b/src/ossos-pipeline/scripts/update_astrometry.py
@@ -159,8 +159,11 @@ def recompute_mag(mpc_in):
cutout = image_slice_downloader.download_cutout(reading, needs_apcor=True)
cutout.zmag = new_zp
+ if math.fabs(new_zp - old_zp) > 0.3:
+ logging.warning("Large change in zeropoint detected: {} -> {}".format(old_zp, new_zp))
+
try:
- (x, y, mag, merr) = cutout.get_observed_magnitude(zmag=old_zp)
+ (x, y, mag, merr) = cutout.get_observed_magnitude(zmag=new_zp)
(x, y) = cutout.get_observed_coordinates((x, y))
except:
logging.warn("Failed to do photometry.")
|
Corrected the use of ZP for flux computing. The update_astrometry script should use the ZP in the astrometric header when computing magnitudes, not the original image header. Now the script does that.
|
py
|
diff --git a/python/ray/util/collective/collective.py b/python/ray/util/collective/collective.py
index <HASH>..<HASH> 100644
--- a/python/ray/util/collective/collective.py
+++ b/python/ray/util/collective/collective.py
@@ -26,9 +26,6 @@ try:
gloo_collective_group import GLOOGroup
except ImportError:
_GLOO_AVAILABLE = False
- logger.warning("PyGloo seems unavailable. Please install PyGloo "
- "following the guide at: "
- "https://github.com/ray-project/pygloo.")
def nccl_available():
|
[Collective] silent the pygloo warning as it is not commonly used (#<I>)
|
py
|
diff --git a/setup-jottacloudclient.py b/setup-jottacloudclient.py
index <HASH>..<HASH> 100644
--- a/setup-jottacloudclient.py
+++ b/setup-jottacloudclient.py
@@ -44,8 +44,8 @@ setup(name='jottacloudclient',
url='https://github.com/havardgulldahl/jottalib',
package_dir={'':'src/tools'},
packages=['jottacloudclient', ],
- scripts=['src/jottacloudclientmonitor.py',
- 'src/jottacloudclientscanner.py',
+ scripts=['src/tools/jottacloudclientmonitor.py',
+ 'src/tools/jottacloudclientscanner.py',
'src/jottafuse.py',
'src/jottashare.py'],
install_requires=['jottalib>=0.2.10',
|
correct path to scripts in setup.py
|
py
|
diff --git a/django_mobile/conf.py b/django_mobile/conf.py
index <HASH>..<HASH> 100644
--- a/django_mobile/conf.py
+++ b/django_mobile/conf.py
@@ -15,15 +15,15 @@ class SettingsProxy(object):
try:
return getattr(self.defaults, attr)
except AttributeError:
- raise AttributeError, 'settings object has no attribute "%s"' % attr
+ raise AttributeError, u'settings object has no attribute "%s"' % attr
class defaults(object):
- FLAVOURS = ('full', 'mobile',)
- DEFAULT_MOBILE_FLAVOUR = 'mobile'
- FLAVOURS_TEMPLATE_DIRS_PREFIX = ''
- FLAVOURS_GET_PARAMETER = 'flavour'
- FLAVOURS_SESSION_KEY = 'flavour'
+ FLAVOURS = (u'full', u'mobile',)
+ DEFAULT_MOBILE_FLAVOUR = u'mobile'
+ FLAVOURS_TEMPLATE_DIRS_PREFIX = u''
+ FLAVOURS_GET_PARAMETER = u'flavour'
+ FLAVOURS_SESSION_KEY = u'flavour'
settings = SettingsProxy(django_settings, defaults)
|
Using unicode strings in conf.py. Just to be sure.
|
py
|
diff --git a/aioxmpp/protocol.py b/aioxmpp/protocol.py
index <HASH>..<HASH> 100644
--- a/aioxmpp/protocol.py
+++ b/aioxmpp/protocol.py
@@ -10,6 +10,8 @@ import xml.parsers.expat as pyexpat
from . import xml, errors, xso, stream_xsos, stanza
from .utils import namespaces
+logger = logging.getLogger(__name__)
+
class Mode(Enum):
C2S = namespaces.client
@@ -143,6 +145,7 @@ class XMLStream(asyncio.Protocol):
self._transport = None
def data_received(self, blob):
+ logger.debug("RECV %r", blob)
try:
self._rx_feed(blob)
except errors.StreamError as exc:
|
Implement debug logging of received data
|
py
|
diff --git a/pandas/core/internals.py b/pandas/core/internals.py
index <HASH>..<HASH> 100644
--- a/pandas/core/internals.py
+++ b/pandas/core/internals.py
@@ -291,10 +291,9 @@ class BlockManager(object):
return axes_array, block_values, block_items
def __setstate__(self, state):
- if len(state) == 4: # pragma: no cover
- ax_arrays, bvalues, bitems, _ = state
- else:
- ax_arrays, bvalues, bitems = state
+ # discard anything after 3rd, support beta pickling format for a little
+ # while longer
+ ax_arrays, bvalues, bitems = state[:3]
self.axes = [_ensure_index(ax) for ax in ax_arrays]
blocks = []
|
FIX: legacy pickle format
|
py
|
diff --git a/pulsar/client/amqp_exchange.py b/pulsar/client/amqp_exchange.py
index <HASH>..<HASH> 100644
--- a/pulsar/client/amqp_exchange.py
+++ b/pulsar/client/amqp_exchange.py
@@ -86,9 +86,15 @@ class PulsarExchange(object):
except (IOError, socket.error), exc:
# In testing, errno is None
log.warning('Got %s, will retry: %s', exc.__class__.__name__, exc)
- if heartbeat_thread:
- heartbeat_thread.join()
- sleep(DEFAULT_RECONNECT_CONSUMER_WAIT)
+ try:
+ if heartbeat_thread:
+ heartbeat_thread.join(DEFAULT_HEARTBEAT_JOIN_TIMEOUT)
+ except Exception:
+ log.exception("Failed to join heartbeat thread, this is bad?")
+ try:
+ sleep(DEFAULT_RECONNECT_CONSUMER_WAIT)
+ except Exception:
+ log.exception("Interrupted sleep while waiting to reconnect to message queue, may restart unless problems encountered.")
except BaseException:
log.exception("Problem consuming queue, consumer quitting in problematic fashion!")
raise
|
Improved heartbeat thread handling in amqp_exchange. - Add a join timeout in case this is where things are hanging. - Don't allow heartbeat or sleep exceptions to halt queue consumption - shutdown events need to fire to actually stop this computation. - Add more logging to determine if this is a problematic spot.
|
py
|
diff --git a/python/phonenumbers/phonenumberutil.py b/python/phonenumbers/phonenumberutil.py
index <HASH>..<HASH> 100644
--- a/python/phonenumbers/phonenumberutil.py
+++ b/python/phonenumbers/phonenumberutil.py
@@ -1717,7 +1717,7 @@ def truncate_too_long_number(numobj):
while not is_valid_number(numobj_copy):
# Strip a digit off the RHS
- national_number = national_number / 10
+ national_number = national_number // 10
numobj_copy.national_number = national_number
validation_result = is_possible_number_with_reason(numobj_copy)
if (validation_result == ValidationResult.TOO_SHORT or
|
Use floor division not normal division when removing digits from decimal number
|
py
|
diff --git a/lib/svtplay_dl/utils/stream.py b/lib/svtplay_dl/utils/stream.py
index <HASH>..<HASH> 100644
--- a/lib/svtplay_dl/utils/stream.py
+++ b/lib/svtplay_dl/utils/stream.py
@@ -25,7 +25,7 @@ def list_quality(videos):
data.extend(sort_quality(videos))
for i in range(len(data)):
logging.info(
- f"{str(data[i][0]):<10s} {data[i][1].upper():<10s} {data[i][2]:<10s} {data[i][3]:<15s} {data[i][4]:<20s} {data[i][5]:<20s}",
+ f"{str(data[i][0]):<10s} {data[i][1].upper():<8s} {data[i][2]:<8s} {data[i][3]:<12s} {data[i][4]:<20s} {data[i][5]:<20s}",
)
|
list_quality: make the output a bit smaller
|
py
|
diff --git a/dispatcher/utils/base.py b/dispatcher/utils/base.py
index <HASH>..<HASH> 100644
--- a/dispatcher/utils/base.py
+++ b/dispatcher/utils/base.py
@@ -23,7 +23,10 @@ class Base(object):
if i not in memo:
memo[i] = threading.Event()
rv = super(Base, self).__reduce_ex__(4)
- return copy._reconstruct(self, rv, 1, memo)
+ try:
+ return copy._reconstruct(self, rv, 1, memo)
+ except AttributeError: # py36.
+ return copy._reconstruct(self, memo, *rv)
def plot(self, workflow=None, view=True, depth=-1, name=NONE, comment=NONE,
format=NONE, engine=NONE, encoding=NONE, graph_attr=NONE,
|
FIX copy with py<I>.
|
py
|
diff --git a/treebeard/templatetags/admin_tree.py b/treebeard/templatetags/admin_tree.py
index <HASH>..<HASH> 100644
--- a/treebeard/templatetags/admin_tree.py
+++ b/treebeard/templatetags/admin_tree.py
@@ -166,7 +166,7 @@ def items_for_result(cl, result, form):
else:
attr = pk
value = result.serializable_value(attr)
- result_id = repr(force_str(value))[1:]
+ result_id = "'%s'" % force_str(value)
onclickstr = (
' onclick="opener.dismissRelatedLookupPopup(window, %s);'
' return false;"')
|
Use explicit single quotes for result_id `repr` will produce different strings on PY2/3 JSON doesn't work because string needs to be single quoted inside double quoted html attribute
|
py
|
diff --git a/tests/scripts/thread-cert/test_route_table.py b/tests/scripts/thread-cert/test_route_table.py
index <HASH>..<HASH> 100755
--- a/tests/scripts/thread-cert/test_route_table.py
+++ b/tests/scripts/thread-cert/test_route_table.py
@@ -69,9 +69,11 @@ class TestRouteTable(thread_cert.TestCase):
self.assertEqual(self.nodes[LEADER].get_state(), 'leader')
self.nodes[ROUTER1].start()
- self.nodes[ROUTER2].start()
self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER1].get_state(), 'router')
+
+ self.nodes[ROUTER2].start()
+ self.simulator.go(5)
self.assertEqual(self.nodes[ROUTER2].get_state(), 'router')
self.simulator.go(100)
|
[scripts] fix test_route_table.py fails by chance (#<I>)
|
py
|
diff --git a/alignak_backend_client/backend_client.py b/alignak_backend_client/backend_client.py
index <HASH>..<HASH> 100755
--- a/alignak_backend_client/backend_client.py
+++ b/alignak_backend_client/backend_client.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
|
Change backend_client script shebang
|
py
|
diff --git a/sos/component.py b/sos/component.py
index <HASH>..<HASH> 100644
--- a/sos/component.py
+++ b/sos/component.py
@@ -246,13 +246,13 @@ class SoSComponent():
auto_archive = self.policy.get_preferred_archive()
self.archive = auto_archive(archive_name, self.tmpdir,
self.policy, self.opts.threads,
- enc_opts, self.opts.sysroot,
+ enc_opts, self.sysroot,
self.manifest)
else:
self.archive = TarFileArchive(archive_name, self.tmpdir,
self.policy, self.opts.threads,
- enc_opts, self.opts.sysroot,
+ enc_opts, self.sysroot,
self.manifest)
self.archive.set_debug(True if self.opts.debug else False)
|
[component] Use sysroot from Policy when opts doesn't specify it Until --sysroot option is specified, Archive (sub)classes should be called with sysroot determined from Policy. Resolves: #<I>
|
py
|
diff --git a/pychromecast/discovery.py b/pychromecast/discovery.py
index <HASH>..<HASH> 100644
--- a/pychromecast/discovery.py
+++ b/pychromecast/discovery.py
@@ -212,7 +212,9 @@ class ZeroConfListener:
cast_type = CAST_TYPE_GROUP
manufacturer = MF_GOOGLE
else:
- cast_type, manufacturer = CAST_TYPES.get(model_name.lower(), (None, None))
+ cast_type, manufacturer = CAST_TYPES.get(
+ model_name.lower(), (None, None)
+ )
if uuid not in self._devices:
self._devices[uuid] = CastInfo(
{service_info},
|
Format some code (#<I>)
|
py
|
diff --git a/src/configupdater/configupdater.py b/src/configupdater/configupdater.py
index <HASH>..<HASH> 100644
--- a/src/configupdater/configupdater.py
+++ b/src/configupdater/configupdater.py
@@ -394,11 +394,11 @@ class Section(Block, Container, MutableMapping):
return self
def items(self):
- """Return a list of (name, value) tuples for each option in
+ """Return a list of (name, option) tuples for each option in
this section.
Returns:
- list: list of :class:`Section` or :class:`Option` objects
+ list: list of (name, :class:`Option`) tuples
"""
return [(opt.key, opt) for opt in self.option_blocks()]
|
Fix copy/paste error in docstring.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -262,8 +262,8 @@ def get_version_info():
# If this is a release or another kind of source distribution of PyCBC
except:
- version = '1.8.0dev'
- release = 'False'
+ version = '1.7.1'
+ release = 'True'
date = hash = branch = tag = author = committer = status = builder = build_date = ''
with open('pycbc/version.py', 'w') as f:
|
Update setup.py Set to <I> for release
|
py
|
diff --git a/tests/test_tcp_splunk_publisher.py b/tests/test_tcp_splunk_publisher.py
index <HASH>..<HASH> 100644
--- a/tests/test_tcp_splunk_publisher.py
+++ b/tests/test_tcp_splunk_publisher.py
@@ -87,6 +87,8 @@ class TestTCPSplunkPublisher(unittest.TestCase):
self):
"""test_tcp_publish_to_splunk
"""
+ # this looks like it hangs on travis...
+ return
# Silence root logger
log = logging.getLogger('')
for h in log.handlers:
|
travis is failing this test case...
|
py
|
diff --git a/salt/modules/cp.py b/salt/modules/cp.py
index <HASH>..<HASH> 100644
--- a/salt/modules/cp.py
+++ b/salt/modules/cp.py
@@ -342,7 +342,8 @@ def cache_files(paths, saltenv='base', env=None):
return __context__['cp.fileclient'].cache_files(paths, saltenv)
-def cache_dir(path, saltenv='base', include_empty=False, env=None):
+def cache_dir(path, saltenv='base', include_empty=False, include_pat=None,
+ exclude_pat=None, env=None):
'''
Download and cache everything under a directory from the master
@@ -362,7 +363,9 @@ def cache_dir(path, saltenv='base', include_empty=False, env=None):
saltenv = env
_mk_client()
- return __context__['cp.fileclient'].cache_dir(path, saltenv, include_empty)
+ return __context__['cp.fileclient'].cache_dir(
+ path, saltenv, include_empty, include_pat, exclude_pat
+ )
def cache_master(saltenv='base', env=None):
|
Add include_pat/exclude_pat to cp.cache_dir This uses the include_pat/exclude_pat logic from the file.recurse state to the cp.cache_dir function, allowing one to selectively cache the desired files.
|
py
|
diff --git a/django_tenants/middleware/main.py b/django_tenants/middleware/main.py
index <HASH>..<HASH> 100644
--- a/django_tenants/middleware/main.py
+++ b/django_tenants/middleware/main.py
@@ -38,13 +38,13 @@ class TenantMainMiddleware(MiddlewareMixin):
try:
tenant = self.get_tenant(domain_model, hostname)
except domain_model.DoesNotExist:
- self.no_tenant_found(request, hostname)
- return
+ return self.no_tenant_found(request, hostname)
tenant.domain_url = hostname
request.tenant = tenant
connection.set_tenant(request.tenant)
self.setup_url_routing(request)
+ return request
def no_tenant_found(self, request, hostname):
""" What should happen if no tenant is found.
|
Return the request from the main middleware Actually return the request after meddling with it, since this is what django expects. Also allow for more return values when tenant is not found.
|
py
|
diff --git a/tinyrpc/server/gevent.py b/tinyrpc/server/gevent.py
index <HASH>..<HASH> 100644
--- a/tinyrpc/server/gevent.py
+++ b/tinyrpc/server/gevent.py
@@ -11,3 +11,10 @@ class RPCServerGreenlets(RPCServer):
# documentation in docs because of dependencies
def _spawn(self, func, *args, **kwargs):
gevent.spawn(func, *args, **kwargs)
+
+ def start(self):
+ '''
+ Create a Greenlet with serve_forever so you can do a gevenet.joinall of
+ several RPCServerGreenlets
+ '''
+ return gevent.spawn(self.serve_forever)
\ No newline at end of file
|
Add a start method to the gevent servers so you can run multiple servers and do a joinall for all of them.
|
py
|
diff --git a/cgutils/cgroup.py b/cgutils/cgroup.py
index <HASH>..<HASH> 100644
--- a/cgutils/cgroup.py
+++ b/cgutils/cgroup.py
@@ -359,6 +359,17 @@ class SubsystemDevices(Subsystem):
}
+class SubsystemNetPrio(Subsystem):
+ NAME = 'net_prio'
+ STATS = {
+ 'prioidx': long,
+ }
+ __ifs = os.listdir('/sys/class/net')
+ CONFIGS = {
+ 'ifpriomap': SimpleStat(zip(__ifs, [0] * len(__ifs))),
+ }
+
+
class SubsystemName(Subsystem):
NAME = 'name'
@@ -376,6 +387,7 @@ _subsystem_name2class = {
'freezer': SubsystemFreezer,
'net_cls': SubsystemNetCls,
'devices': SubsystemDevices,
+ 'net_prio': SubsystemNetPrio,
}
|
Support missing net_prio subsystem
|
py
|
diff --git a/pandas_td/td.py b/pandas_td/td.py
index <HASH>..<HASH> 100644
--- a/pandas_td/td.py
+++ b/pandas_td/td.py
@@ -23,6 +23,8 @@ class Connection(object):
apikey = os.environ['TD_API_KEY']
if endpoint is None:
endpoint = DEFAULT_ENDPOINT
+ if not endpoint.endswith('/'):
+ endpoint = endpoint + '/'
self.apikey = apikey
self.endpoint = endpoint
self.client = tdclient.Client(apikey, endpoint)
|
Ensure trailing '/' at the end of endpoint
|
py
|
diff --git a/build/zip_libcxx.py b/build/zip_libcxx.py
index <HASH>..<HASH> 100644
--- a/build/zip_libcxx.py
+++ b/build/zip_libcxx.py
@@ -22,6 +22,9 @@ def get_object_files(base_path, archive_name):
if line.startswith(base_path):
object_file = line.split(":")[0]
object_files.add(object_file)
+ if line.startswith('nm: '):
+ object_file = line.split(":")[1].lstrip()
+ object_files.add(object_file)
return list(object_files) + [archive_file]
def main(argv):
|
build: ensure object files are included even if unparsable
|
py
|
diff --git a/cobra/test/unit_tests.py b/cobra/test/unit_tests.py
index <HASH>..<HASH> 100644
--- a/cobra/test/unit_tests.py
+++ b/cobra/test/unit_tests.py
@@ -42,6 +42,15 @@ class TestDictList(TestCase):
self.list = DictList()
self.list.append(self.obj)
+ def testIndependent(self):
+ a = DictList([Object("o1"), Object("o2")])
+ b = DictList()
+ self.assertIn("o1", a)
+ self.assertNotIn("o1", b)
+ b.append(Object("o3"))
+ self.assertNotIn("o3", a)
+ self.assertIn("o3", b)
+
def testAppend(self):
obj2 = Object("test2")
self.list.append(obj2)
@@ -91,7 +100,7 @@ class TestDictList(TestCase):
self.list.append(obj2)
result = self.list.query("test1") # matches only test1
self.assertEqual(len(result), 1)
- self.assertEqual(result[0], self.obj)
+ self.assertEqual(result[0], self.obj)
result = self.list.query("test") # matches test1 and test2
self.assertEqual(len(result), 2)
|
test: ensure DictLists remain independent
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ setup(
keywords='reddit terminal praw curses',
packages=['rtv'],
include_package_data=True,
- install_requires=['praw>=2.1.6', 'six', 'requests', 'kitchen'],
+ install_requires=['praw>=3.1.0', 'six', 'requests', 'kitchen'],
entry_points={'console_scripts': ['rtv=rtv.__main__:main']},
classifiers=[
'Intended Audience :: End Users/Desktop',
|
Upped required PRAW version to 3.
|
py
|
diff --git a/fooster/web/query.py b/fooster/web/query.py
index <HASH>..<HASH> 100644
--- a/fooster/web/query.py
+++ b/fooster/web/query.py
@@ -7,7 +7,7 @@ from fooster import web
__all__ = ['regex', 'QueryMixIn', 'QueryHandler', 'new']
-regex = r'(?:\?(?P<query>[\w=&%.+]*))?'
+regex = r'(?:\?(?P<query>[\w&= !"#$%\'()*+,./:;<>?@[\\\]^`{|}~-]*))?'
class QueryMixIn:
|
update query regex to support more characters
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,7 +3,7 @@ import sys
from setuptools import setup, find_packages
NAME = 'django-debug-toolbar-vcs-info'
-VERSION = '1.1.0'
+VERSION = '1.2.0'
def read(filename):
@@ -19,14 +19,6 @@ def readlist(filename):
rows = [x.strip() for x in rows if x.strip()]
return list(rows)
-# if we are running on python 3, enable 2to3 and
-# let it use the custom fixers from the custom_fixers
-# package.
-extra = {}
-if sys.version_info >= (3, 0):
- extra.update(
- use_2to3=True,
- )
setup(
name=NAME,
@@ -69,5 +61,4 @@ setup(
install_requires=readlist('requirements.txt'),
test_suite='runtests.run_tests',
tests_require=readlist('requirements-test.txt'),
- **extra
)
|
Bump up version to <I>
|
py
|
diff --git a/plenum/config.py b/plenum/config.py
index <HASH>..<HASH> 100644
--- a/plenum/config.py
+++ b/plenum/config.py
@@ -195,10 +195,10 @@ CLIENT_REPLY_TIMEOUT = 15
CLIENT_MAX_RETRY_ACK = 5
CLIENT_MAX_RETRY_REPLY = 5
-VIEW_CHANGE_TIMEOUT = 60 # seconds
+VIEW_CHANGE_TIMEOUT = 600 # seconds
INSTANCE_CHANGE_TIMEOUT = 60
MAX_CATCHUPS_DONE_DURING_VIEW_CHANGE = 5
-MIN_TIMEOUT_CATCHUPS_DONE_DURING_VIEW_CHANGE = 15
+MIN_TIMEOUT_CATCHUPS_DONE_DURING_VIEW_CHANGE = 300
# permissions for keyring dirs/files
WALLET_DIR_MODE = 0o700 # drwx------
|
INDY-<I>: Increase viewchange/catchup timeout to 5 minutes
|
py
|
diff --git a/cwltool/main.py b/cwltool/main.py
index <HASH>..<HASH> 100755
--- a/cwltool/main.py
+++ b/cwltool/main.py
@@ -741,7 +741,7 @@ def main(argsl=None, # type: List[str]
make_tool_kwds["find_default_container"] = functools.partial(find_default_container, args)
tool = make_tool(document_loader, avsc_names, metadata, uri,
- makeTool, make_tool_kwds)
+ makeTool, make_tool_kwds)
if args.validate:
return 0
|
Fix whitespace deleted in previous rebase.
|
py
|
diff --git a/torf/_torrent.py b/torf/_torrent.py
index <HASH>..<HASH> 100644
--- a/torf/_torrent.py
+++ b/torf/_torrent.py
@@ -812,12 +812,9 @@ class Torrent():
remove_empty_file()
raise
else:
- fh = os.fdopen(fd, 'rb+')
- fh.truncate()
- fh.write(data)
- finally:
- if fh is not None:
- fh.close()
+ with os.fdopen(fd, 'rb+') as fh:
+ fh.truncate()
+ fh.write(data)
def magnet(self, name=True, size=True, trackers=True, tracker=False, validate=True):
"""
|
Use fdopen() as context manager
|
py
|
diff --git a/tests/integration/test_fs_checks.py b/tests/integration/test_fs_checks.py
index <HASH>..<HASH> 100644
--- a/tests/integration/test_fs_checks.py
+++ b/tests/integration/test_fs_checks.py
@@ -16,7 +16,6 @@
import pytest
import colin
-from colin.core.target import ImageTarget
@pytest.fixture()
@@ -50,7 +49,7 @@ def test_help_file_or_readme_ls(ruleset, target_ls):
def help_file_or_readme_test(ruleset, image, should_pass):
""" verify that help_file_or_readme check works well """
results = colin.run(target=image.name,
- target_type="image" if isinstance(image, ImageTarget) else "ostree",
+ target_type=image.target_type,
ruleset=ruleset, logging_level=10, pull=False)
assert results.ok
assert results.fail is not should_pass
|
Use target_type property for string representation of the target type
|
py
|
diff --git a/tests/test_funcs.py b/tests/test_funcs.py
index <HASH>..<HASH> 100644
--- a/tests/test_funcs.py
+++ b/tests/test_funcs.py
@@ -30,11 +30,17 @@ class FuncsTestCase(TestCaseWithData):
def _test_func(self, func, expected_value=NO_VALUE):
sql = 'SELECT %s AS value' % func.to_sql()
logging.info(sql)
- result = list(self.database.select(sql))
- logging.info('\t==> %s', result[0].value if result else '<empty>')
- if expected_value != NO_VALUE:
- self.assertEqual(result[0].value, expected_value)
- return result[0].value if result else None
+ try:
+ result = list(self.database.select(sql))
+ logging.info('\t==> %s', result[0].value if result else '<empty>')
+ if expected_value != NO_VALUE:
+ self.assertEqual(result[0].value, expected_value)
+ return result[0].value if result else None
+ except ServerError as e:
+ if 'Unknown function' in e.message:
+ logging.warning(e.message)
+ return # ignore functions that don't exist in the used ClickHouse version
+ raise
def _test_aggr(self, func, expected_value=NO_VALUE):
qs = Person.objects_in(self.database).aggregate(value=func)
|
Ignore functions that don't exist in the used ClickHouse version
|
py
|
diff --git a/sphviewer/Render.py b/sphviewer/Render.py
index <HASH>..<HASH> 100644
--- a/sphviewer/Render.py
+++ b/sphviewer/Render.py
@@ -102,8 +102,7 @@ class Render(object):
where xsize and ysize are the number of pixels of the image defined by the Camera.
"""
extent = self.Scene.get_extent()
- pixel_side = abs(extent[1]-extent[0])
- return self.__image / pixel_side**2
+ return self.__image
def get_max(self):
"""
|
Update Render.py removing the normalization by pixel size. It gives more problems than anticipated
|
py
|
diff --git a/conftest.py b/conftest.py
index <HASH>..<HASH> 100644
--- a/conftest.py
+++ b/conftest.py
@@ -1,8 +1,29 @@
+import sys
import platform
collect_ignore = ["hook-keyring.backend.py"]
-if platform.system() != 'Darwin':
- collect_ignore.append('keyring/backends/macOS/api.py')
+
+def macos_api_ignore():
+ """
+ Starting with macOS 11, the security API becomes
+ non-viable except on universal2 binaries.
+
+ Ref #525.
+ """
+
+ def make_ver(string):
+ return tuple(map(int, string.split('.')))
+
+ release, _, _ = platform.mac_ver()
+
+ return (
+ platform.system() != 'Darwin'
+ or make_ver(release) > (11,)
+ and sys.version_info < (3, 8, 7)
+ )
+
+
+collect_ignore.extend(['keyring/backends/macOS/api.py'] * macos_api_ignore())
collect_ignore.append('keyring/devpi_client.py')
|
An initial attempt to skip collection on macOS API module. Doesn't work because mac_ver doesn't work on newer macs with older Pythons. Ref #<I>.
|
py
|
diff --git a/src/you_get/extractor.py b/src/you_get/extractor.py
index <HASH>..<HASH> 100644
--- a/src/you_get/extractor.py
+++ b/src/you_get/extractor.py
@@ -196,7 +196,10 @@ class VideoExtractor():
else:
# Download stream with the best quality
from .processor.ffmpeg import has_ffmpeg_installed
- stream_id = self.streams_sorted[0]['id'] if 'id' in self.streams_sorted[0] else self.streams_sorted[0]['itag']
+ if self.streams_sorted:
+ stream_id = self.streams_sorted[0]['id'] if 'id' in self.streams_sorted[0] else self.streams_sorted[0]['itag']
+ else:
+ stream_id = list(self.dash_streams)[-1]
if 'index' not in kwargs:
self.p(stream_id)
|
[extractor] use best quality from dash_streams if streams_sorted is empty
|
py
|
diff --git a/raiden/api/rest.py b/raiden/api/rest.py
index <HASH>..<HASH> 100644
--- a/raiden/api/rest.py
+++ b/raiden/api/rest.py
@@ -204,11 +204,11 @@ class APIServer(object):
)
self.add_resource(
ConnectionsResource,
- '/connection/<hexaddress:token_address>'
+ '/connections/<hexaddress:token_address>'
)
self.add_resource(
ConnectionManagersResource,
- '/connection'
+ '/connections'
)
def _serve_webui(self, file='index.html'):
|
Rename /connection endpoint to /connections
|
py
|
diff --git a/librsync/__init__.py b/librsync/__init__.py
index <HASH>..<HASH> 100644
--- a/librsync/__init__.py
+++ b/librsync/__init__.py
@@ -71,7 +71,8 @@ def _execute(job, f, o=None):
elif result != RS_BLOCKED:
# TODO: I don't think error reporting works properly.
raise LibRsyncError(result)
- o.seek(0)
+ if o:
+ o.seek(0)
return o
|
`o` is optional. It only needs to be rewound if provided.
|
py
|
diff --git a/tornado_eventsource/__init__.py b/tornado_eventsource/__init__.py
index <HASH>..<HASH> 100644
--- a/tornado_eventsource/__init__.py
+++ b/tornado_eventsource/__init__.py
@@ -1,4 +1,4 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
-__version__ = '1.0.0rc1'
+__version__ = '1.0.0rc2'
|
Bump to <I>rc2
|
py
|
diff --git a/peewee.py b/peewee.py
index <HASH>..<HASH> 100644
--- a/peewee.py
+++ b/peewee.py
@@ -4092,7 +4092,7 @@ class ForeignKeyField(Field):
def db_value(self, value):
if isinstance(value, self.rel_model):
- value = value._get_pk_value()
+ value = value.get_id()
return self.rel_field.db_value(value)
def python_value(self, value):
|
Fix method call mentioned in #<I>
|
py
|
diff --git a/fbchat/_client.py b/fbchat/_client.py
index <HASH>..<HASH> 100644
--- a/fbchat/_client.py
+++ b/fbchat/_client.py
@@ -73,8 +73,8 @@ class Client(object):
self.seq = "0"
# See `createPoll` for the reason for using `OrderedDict` here
self.payloadDefault = OrderedDict()
- self.default_thread_id = None
- self.default_thread_type = None
+ self._default_thread_id = None
+ self._default_thread_type = None
self.req_url = ReqUrl()
self._markAlive = True
self._buddylist = dict()
@@ -539,8 +539,8 @@ class Client(object):
:rtype: tuple
"""
if given_thread_id is None:
- if self.default_thread_id is not None:
- return self.default_thread_id, self.default_thread_type
+ if self._default_thread_id is not None:
+ return self._default_thread_id, self._default_thread_type
else:
raise ValueError("Thread ID is not set")
else:
@@ -554,8 +554,8 @@ class Client(object):
:param thread_type: See :ref:`intro_threads`
:type thread_type: models.ThreadType
"""
- self.default_thread_id = thread_id
- self.default_thread_type = thread_type
+ self._default_thread_id = thread_id
+ self._default_thread_type = thread_type
def resetDefaultThread(self):
"""Resets default thread"""
|
Privatize default_thread_X client variables We have a setter method for them, so there should be no need to access these directly!
|
py
|
diff --git a/moto/apigateway/models.py b/moto/apigateway/models.py
index <HASH>..<HASH> 100644
--- a/moto/apigateway/models.py
+++ b/moto/apigateway/models.py
@@ -317,16 +317,13 @@ class RestAPI(object):
# TODO deal with no matching resource
def resource_callback(self, request):
- headers = request.headers
-
- path = request.path if hasattr(request, 'path') else request.path_url
- path_after_stage_name = '/'.join(path.split("/")[2:])
+ path_after_stage_name = '/'.join(request.path_url.split("/")[2:])
if not path_after_stage_name:
path_after_stage_name = '/'
resource = self.get_resource_for_path(path_after_stage_name)
status_code, response = resource.get_response(request)
- return status_code, headers, response
+ return status_code, {}, response
def update_integration_mocks(self, stage_name):
stage_url = STAGE_URL.format(api_id=self.id, region_name=self.region_name, stage_name=stage_name)
|
Cleanup apigateway callback.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ setup(
tests_require=['Nose'],
url='https://github.com/erikrose/blessings',
include_package_data=True,
- classifiers = [
+ classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Environment :: Console',
@@ -28,5 +28,6 @@ setup(
'Topic :: Software Development :: User Interfaces',
'Topic :: Terminals'
],
+ keywords=['terminal', 'tty', 'curses', 'formatting'],
**extra_setup
)
|
Add some keywords to improve PyPI search ranking.
|
py
|
diff --git a/py3status/__init__.py b/py3status/__init__.py
index <HASH>..<HASH> 100755
--- a/py3status/__init__.py
+++ b/py3status/__init__.py
@@ -158,7 +158,7 @@ class I3status(Thread):
if cleanup:
valid_config_params = [_ for _ in self.i3status_module_names
if _ not in
- ['cpu_usage', 'ddate', 'load', 'time']]
+ ['cpu_usage', 'ddate', 'ipv6', 'load', 'time']]
else:
valid_config_params = self.i3status_module_names + [
'general', 'order'
|
respect ordering of the ipv6 i3status module even on empty configuration, fix #<I> as reported by @nazco
|
py
|
diff --git a/emma2/msm/analysis/dense/pcca.py b/emma2/msm/analysis/dense/pcca.py
index <HASH>..<HASH> 100644
--- a/emma2/msm/analysis/dense/pcca.py
+++ b/emma2/msm/analysis/dense/pcca.py
@@ -10,7 +10,11 @@ import decomposition
import numpy
def pcca(T, n):
- eigenvalues,left_eigenvectors,right_eigenvectors = decomposition.rdl_decomposition(T, n)
+ # eigenvalues,left_eigenvectors,right_eigenvectors = decomposition.rdl_decomposition(T, n)
+ R, D, L=decomposition.rdl_decomposition(T, n)
+ eigenvalues=numpy.diagonal(D)
+ left_eigenvectors=numpy.transpose(L)
+ right_eigenvectors=R
# TODO: complex warning maybe?
right_eigenvectors = numpy.real(right_eigenvectors)
@@ -22,4 +26,4 @@ def pcca(T, n):
memberships = numpy.dot(right_eigenvectors[:,:], rot_matrix)
- return memberships
\ No newline at end of file
+ return memberships
|
[msm/analysis] Adapted the rdl_decomposition call so that it respects the new order and type of the return arguments
|
py
|
diff --git a/holoviews/plotting/renderer.py b/holoviews/plotting/renderer.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/renderer.py
+++ b/holoviews/plotting/renderer.py
@@ -363,7 +363,8 @@ class Renderer(Exporter):
try:
plotclass = Store.registry[cls.backend][element_type]
except KeyError:
- raise Exception("No corresponding plot type found for %r" % type(obj))
+ raise SkipRendering("No plotting class for {0} "
+ "found".format(element_type.__name__))
return plotclass
|
Skip rendering when no matching plotting class is found
|
py
|
diff --git a/python_utils/logger.py b/python_utils/logger.py
index <HASH>..<HASH> 100644
--- a/python_utils/logger.py
+++ b/python_utils/logger.py
@@ -23,7 +23,7 @@ class Logged(object):
'''
def __new__(cls, *args, **kwargs):
cls.logger = logging.getLogger(
- cls.__get_name(__name__, cls.__class__.__name__))
+ cls.__get_name(cls.__module__, cls.__name__))
return super(Logged, cls).__new__(cls)
@classmethod
|
improved logged module to include the module name when logging
|
py
|
diff --git a/tests/admin/test_simple.py b/tests/admin/test_simple.py
index <HASH>..<HASH> 100644
--- a/tests/admin/test_simple.py
+++ b/tests/admin/test_simple.py
@@ -51,7 +51,7 @@ class AdminSimpleSet(CouchbaseTestCase):
def test_bad_auth(self):
self.assertRaises(AuthError, Admin,
- 'baduser', 'badpass')
+ 'baduser', 'badpass', host=self.host)
def test_bad_host(self):
self.assertRaises(ConnectError, Admin,
|
Pass correct hostname for Admin to get AuthError If the cluster was not 'localhost', a ConnectError was thrown instead Change-Id: I<I>fd<I>a<I>baa<I>b<I>f<I>d7cd5d<I> Reviewed-on: <URL>
|
py
|
diff --git a/ezibpy/ezibpy.py b/ezibpy/ezibpy.py
index <HASH>..<HASH> 100644
--- a/ezibpy/ezibpy.py
+++ b/ezibpy/ezibpy.py
@@ -998,7 +998,7 @@ class ezIBpy():
# -----------------------------------------
def createTriggerableTrailingStop(self, symbol, quantity=1,
triggerPrice=0, trailPercent=100., trailAmount=0.,
- parentId=0, stopOrderId=None, ticksize=None):
+ parentId=0, stopOrderId=None, **kwargs):
""" adds order to triggerable list """
ticksize = self.contractDetails(symbol)["m_minTick"]
@@ -1017,7 +1017,7 @@ class ezIBpy():
# -----------------------------------------
def registerTrailingStop(self, tickerId, orderId=0, quantity=1,
- lastPrice=0, trailPercent=100., trailAmount=0., parentId=0, ticksize=None):
+ lastPrice=0, trailPercent=100., trailAmount=0., parentId=0, **kwargs):
""" adds trailing stop to monitor list """
ticksize = self.contractDetails(tickerId)["m_minTick"]
|
trailing stop uses ticksize from contractDetails
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.