diff (stringlengths 139–3.65k) | message (stringlengths 8–627) | diff_languages (stringclasses 1: py)
---|---|---|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,7 @@ setup(
url="https://github.com/czepluch/pysecp256k1",
license="MIT",
packages=find_packages(exclude=["_cffi_build", "_cffi_build/*"]),
+ package_data={'': ['libsecp256k1.so']},
# ext_modules=[sha3],
install_requires=["cffi>=1.2.1"],
setup_requires=["cffi>=1.2.1"],
|
added .so file as package data to setup.py to include it in the build. tox now passes
|
py
|
diff --git a/wakeonlan.py b/wakeonlan.py
index <HASH>..<HASH> 100755
--- a/wakeonlan.py
+++ b/wakeonlan.py
@@ -29,12 +29,10 @@ def create_magic_packet(macaddress):
magic packet.
"""
- if len(macaddress) == 12:
- pass
- elif len(macaddress) == 17:
+ if len(macaddress) == 17:
sep = macaddress[2]
macaddress = macaddress.replace(sep, '')
- else:
+ elif len(macaddress) != 12:
raise ValueError('Incorrect MAC address format')
# Pad the synchronization stream
@@ -63,16 +61,13 @@ def send_magic_packet(*macs, **kwargs):
(default 9)
"""
- packets = []
ip = kwargs.pop('ip_address', BROADCAST_IP)
port = kwargs.pop('port', DEFAULT_PORT)
for k in kwargs:
raise TypeError('send_magic_packet() got an unexpected keyword '
'argument {!r}'.format(k))
- for mac in macs:
- packet = create_magic_packet(mac)
- packets.append(packet)
+ packets = [create_magic_packet(mac) for mac in macs]
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
|
Cleanup mac formatting and packet creation logic
|
py
|
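The cleaned-up branching above accepts either a bare 12-character MAC or a 17-character MAC with separators. A minimal sketch of that equivalence (assuming the module is importable as wakeonlan, per the file path in the diff):

    from wakeonlan import create_magic_packet

    # both formats reduce to the same 12 hex digits, so the 102-byte magic
    # packets (6 sync bytes of 0xFF plus 16 repetitions of the MAC) match
    assert create_magic_packet('ff:ee:dd:cc:bb:aa') == create_magic_packet('ffeeddccbbaa')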
diff --git a/qimage2ndarray/qimageview_python.py b/qimage2ndarray/qimageview_python.py
index <HASH>..<HASH> 100644
--- a/qimage2ndarray/qimageview_python.py
+++ b/qimage2ndarray/qimageview_python.py
@@ -1,8 +1,8 @@
import numpy as np
from qimage2ndarray.dynqt import qt, QtGui
-def PyQt4_data(image):
- # PyQt4's QImage.bits() returns a sip.voidptr that supports
+def PyQt_data(image):
+ # PyQt4/PyQt5's QImage.bits() returns a sip.voidptr that supports
# conversion to string via asstring(size) or getting its base
# address via int(...):
return (int(image.bits()), False)
@@ -22,7 +22,8 @@ def PySide_data(image):
return (int(ma.group(1), 16), False)
getdata = dict(
- PyQt4 = PyQt4_data,
+ PyQt4 = PyQt_data,
+ PyQt5 = PyQt_data,
PySide = PySide_data,
)[qt.name()]
|
make qimageview_python compatible with PyQt5
|
py
|
diff --git a/django_lab_members_site/settings.py b/django_lab_members_site/settings.py
index <HASH>..<HASH> 100644
--- a/django_lab_members_site/settings.py
+++ b/django_lab_members_site/settings.py
@@ -68,7 +68,7 @@ DATABASES = {
LANGUAGE_CODE = 'en-us'
-TIME_ZONE = 'UTC'
+TIME_ZONE = 'America/Los_Angeles'
USE_I18N = True
|
Change time zone to America/Los_Angeles
|
py
|
diff --git a/workshift/utils.py b/workshift/utils.py
index <HASH>..<HASH> 100644
--- a/workshift/utils.py
+++ b/workshift/utils.py
@@ -260,18 +260,16 @@ def past_sign_out(instance, moment=None):
cutoff_hours = timedelta(hours=instance.pool.sign_out_cutoff)
cutoff_time = start_datetime - cutoff_hours
+ # Let people sign out of shifts if they were assigned to them within the
+ # no-sign-out window (i.e. assigned on Monday, shift on Tuesday)
assigned_time = instance.logs.filter(
- person=instance.workshifter,
+ person=instance.workshifter or instance.liable,
entry_type=ShiftLogEntry.ASSIGNED,
+ entry_time__gte=cutoff_time,
)
- # Let people sign out of shifts if they were assigned to them within the
- # no-sign-out window (i.e. assigned on Monday, shift on Tuesday)
if assigned_time.count() > 0:
- assigned_time = assigned_time.latest("entry_time")
-
- if assigned_time.entry_time > cutoff_time:
- return False
+ return False
return moment > cutoff_time
|
Cleaned up assigned_time a bit more
|
py
|
diff --git a/dsari/render.py b/dsari/render.py
index <HASH>..<HASH> 100644
--- a/dsari/render.py
+++ b/dsari/render.py
@@ -73,6 +73,9 @@ def main(argv):
lh_console.setLevel(logging.INFO)
logger.addHandler(lh_console)
+ if not os.path.exists(os.path.join(config['data_dir'], 'dsari.sqlite3')):
+ return
+
if config['template_dir']:
loader = jinja2.ChoiceLoader(
jinja2.FileSystemLoader(config['template_dir']),
|
Exit render if the DB does not exist
|
py
|
diff --git a/test/unit/pipeline/test_pipeline.py b/test/unit/pipeline/test_pipeline.py
index <HASH>..<HASH> 100644
--- a/test/unit/pipeline/test_pipeline.py
+++ b/test/unit/pipeline/test_pipeline.py
@@ -157,7 +157,7 @@ class DummyPhasePositions(af.AbstractPhase):
self.phase_name = phase_name
self.phase_tag = ''
self.phase_path = phase_name
- self.optimizer = Optimizer()
+ self.optimizer = Optimizer(phase_name)
def run(self, positions, pixel_scale, results):
self.positions = positions
|
fixed mock phase and optimizer naming issues
|
py
|
diff --git a/spamc/client.py b/spamc/client.py
index <HASH>..<HASH> 100644
--- a/spamc/client.py
+++ b/spamc/client.py
@@ -127,7 +127,7 @@ class SpamC(object):
user=None,
timeout=None,
wait_tries=0.3,
- max_tries=3,
+ max_tries=5,
backend="thread",
gzip=None,
compress_level=6,
|
Increase default retries on connection errors
|
py
|
diff --git a/docs/source/conf.py b/docs/source/conf.py
index <HASH>..<HASH> 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -51,6 +51,11 @@ templates_path = ['_templates']
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
+source_parsers = {
+ '.md': 'recommonmark.parser.CommonMarkParser',
+}
+source_suffix = ['.rst', '.md']
+
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
|
doc: dev: include option for markdown in documentation
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python3
+#!/usr/bin/env python3.5
# -*- coding: utf-8 -*-
#
# Astrocats documentation build configuration file, created by
|
MAINT: sphinx stuff
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ with open('README.rst') as file_object:
description = file_object.read()
setup(name='travis-encrypt',
- version='1.2.3',
+ version='1.3.0',
author='Mandeep',
author_email='[email protected]',
url='https://github.com/mandeep/Travis-Encrypt',
|
Increment minor version to <I>
|
py
|
diff --git a/glances/core/glances_monitor_list.py b/glances/core/glances_monitor_list.py
index <HASH>..<HASH> 100644
--- a/glances/core/glances_monitor_list.py
+++ b/glances/core/glances_monitor_list.py
@@ -141,7 +141,7 @@ class MonitorList(object):
except Exception:
self.__monitor_list[i]['result'] = 'Cannot execute command'
# Only save the first line
- self.__monitor_list[i]['result'] = self.__monitor_list[i]['result'].split('\n')[0]
+ self.__monitor_list[i]['result'] = self.__monitor_list[i]['result'].decode('utf-8').split('\n')[0]
if self.command(i) is None or self.__monitor_list[i]['result'] == '':
# If there is no command specified in the conf file
|
Correct an issue with Python <I> and the monitored list
|
py
|
diff --git a/examples/hhl.py b/examples/hhl.py
index <HASH>..<HASH> 100644
--- a/examples/hhl.py
+++ b/examples/hhl.py
@@ -35,7 +35,7 @@ The result is good if the register size is large enough such that for every pair
the ratio can be approximated by a pair of possible register values. Let s be the scaling factor
from possible register values to eigenvalues. One way to set t is
-t = 2π/sN
+t = 2π/(sN)
For arbitrary matrices, because properties of their eigenvalues are typically unknown, parameters C
and t are fine-tuned based on their condition number.
@@ -148,7 +148,7 @@ class PhaseKickback(cirq.Gate):
class EigenRotation(cirq.Gate):
- """Perform the of the ancilla equivalent to divison of the memory by eigenvalues of matrix.
+ """Perform a rotation on an ancilla equivalent to division by eigenvalues of a matrix.
EigenRotation performs the set of rotation on the ancilla qubit equivalent to division on the
memory register by each eigenvalue of the matrix. The last qubit is the ancilla qubit; all
|
Minor cosmetic changes (#<I>). Added a parenthesis that I think is required. Fixed a sentence where I think the word "rotation" was missing.
|
py
|
diff --git a/dipper/sources/BioGrid.py b/dipper/sources/BioGrid.py
index <HASH>..<HASH> 100644
--- a/dipper/sources/BioGrid.py
+++ b/dipper/sources/BioGrid.py
@@ -1,7 +1,7 @@
import os
import logging
import re
-from typing import Union
+from typing import Union, Optional
from zipfile import ZipFile
@@ -207,7 +207,7 @@ class BioGrid(Source):
return
- def _interactor_to_gene_curie(self, interactor: str) -> Union[str, None]:
+ def _interactor_to_gene_curie(self, interactor: str) -> Optional[str]:
"""Turn iteractor id like 'entrez gene/locuslink:3645446' or biogrid:12345
into a gene CURIE, like NCBIGene:3645446 or BIOGRID:12345
|
Change Union[str, None] to Optional[str]
|
py
|
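For readers unfamiliar with the typing shorthand involved here: Optional[str] is defined as Union[str, None], so the new annotation is equivalent, just more idiomatic. A quick check:

    from typing import Optional, Union

    # Optional[X] is sugar for Union[X, None]; the two annotations compare equal
    assert Optional[str] == Union[str, None]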
diff --git a/pyrap_functionals/trunk/pyrap_functionals/functional.py b/pyrap_functionals/trunk/pyrap_functionals/functional.py
index <HASH>..<HASH> 100644
--- a/pyrap_functionals/trunk/pyrap_functionals/functional.py
+++ b/pyrap_functionals/trunk/pyrap_functionals/functional.py
@@ -18,7 +18,7 @@ class functional(_functional):
if isinstance(mode, dict):
d['mode'] = mode
_functional.__init__(self, d, dtype)
- if isinstance(params, list) or isinstance(params, tuple):
+ if hasattr(params, "__len__"):
if len(params) == 0:
pass
elif len(params) == self.npar():
|
accept 'anything' which has a length. This will enable handling of arrays
|
py
|
diff --git a/ryu/tests/integrated/common/docker_base.py b/ryu/tests/integrated/common/docker_base.py
index <HASH>..<HASH> 100644
--- a/ryu/tests/integrated/common/docker_base.py
+++ b/ryu/tests/integrated/common/docker_base.py
@@ -133,7 +133,7 @@ class Command(object):
if out.returncode == 0:
return out
LOG.error(out.stderr)
- if try_times + 1 >= try_times:
+ if i + 1 >= try_times:
break
time.sleep(interval)
raise CommandError(out)
|
scenario test: Fix the wrong retry check in command execution
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup, find_packages
setup(
name='django-disqus',
- version='0.3.5beta6',
+ version='0.3.5beta7',
description='Export existing comments and integrate DISQUS into your Django website',
author='Arthur Koziel',
author_email='[email protected]',
|
Version bump to <I>beta7
|
py
|
diff --git a/backtrader/resamplerfilter.py b/backtrader/resamplerfilter.py
index <HASH>..<HASH> 100644
--- a/backtrader/resamplerfilter.py
+++ b/backtrader/resamplerfilter.py
@@ -162,8 +162,8 @@ class _BaseResampler(with_metaclass(metabase.MetaParams, object)):
return point
def _barover_subdays(self, data):
- # Put session end in context of current datetime
- sessend = data.datetime.tm2dtime(data.sessionend)
+ # Put session end in context of last bar datetime
+ sessend = int(self.bar.datetime) + data.sessionend
if data.datetime[0] > sessend:
# Next session is on (defaults to next day)
|
Fixes #<I> by correctly calculating when the current session ends for the last bar (when next session data is already in)
|
py
|
diff --git a/lambda_uploader/shell.py b/lambda_uploader/shell.py
index <HASH>..<HASH> 100644
--- a/lambda_uploader/shell.py
+++ b/lambda_uploader/shell.py
@@ -64,7 +64,11 @@ def _execute(args):
package.extra_file(p)
if cfg.function_path:
pth = cfg.function_path
- pkg = package.build_package(pth, cfg.requirements,
+
+ requirements = cfg.requirements
+ if args.requirements:
+ requirements = path.abspath(args.requirements)
+ pkg = package.build_package(pth, requirements,
venv, cfg.ignore)
if not args.no_clean:
@@ -133,6 +137,8 @@ def main(arv=None):
'can be set with $LAMBDA_UPLOADER_ROLE'))
parser.add_argument('--profile', dest='profile',
help='specify AWS cli profile')
+ parser.add_argument('--requirements', '-r', dest='requirements',
+ help='specify a requirements.txt file')
alias_help = 'alias for published version (WILL SET THE PUBLISH FLAG)'
parser.add_argument('--alias', '-a', dest='alias',
default=None, help=alias_help)
|
Added CLI option to set a requirements file
|
py
|
diff --git a/cyphi/constants.py b/cyphi/constants.py
index <HASH>..<HASH> 100644
--- a/cyphi/constants.py
+++ b/cyphi/constants.py
@@ -10,5 +10,5 @@ DIRECTIONS = ('past', 'future')
# Directory for the persistent joblib Memory cache
CACHE_DIRECTORY = '__cyphi_cache__'
# The maximum percentage of RAM that CyPhi should use for caching.
-# Defaults to 75%.
-MAXMEM = 75
+# Defaults to 50%.
+MAXMEM = 50
|
Use <I>% of available memory
|
py
|
diff --git a/streamteam/inference/model.py b/streamteam/inference/model.py
index <HASH>..<HASH> 100644
--- a/streamteam/inference/model.py
+++ b/streamteam/inference/model.py
@@ -129,7 +129,8 @@ class EmceeModel(object):
ix1 = 0
for group_name,param_name,param in self._walk():
- val = p[ix1:ix1+param.size]
+ val = np.squeeze(p[ix1:ix1+param.size])
+
if group_name is None:
d[param_name] = val
else:
|
hack to squeeze param val
|
py
|
diff --git a/scs_core/data/timedelta.py b/scs_core/data/timedelta.py
index <HASH>..<HASH> 100644
--- a/scs_core/data/timedelta.py
+++ b/scs_core/data/timedelta.py
@@ -8,14 +8,11 @@ A JSONable wrapper for timedelta.
import re
-from collections import OrderedDict
from datetime import timedelta
from scs_core.data.json import JSONable
-# TODO: check whether we need to deal with weeks.
-
# --------------------------------------------------------------------------------------------------------------------
class Timedelta(JSONable):
|
Upgraded Timedelta JSON.
|
py
|
diff --git a/pydbc/__init__.py b/pydbc/__init__.py
index <HASH>..<HASH> 100644
--- a/pydbc/__init__.py
+++ b/pydbc/__init__.py
@@ -45,7 +45,7 @@ class cursor():
def is_closed(self):
return self.impl == None
-class connection():
+class Connection():
def _assert_valid(self):
if self.impl is None:
raise Error("Connection already closed")
@@ -78,5 +78,5 @@ class connection():
@translate_exceptions
def connect(dsn):
"""Create ODBC connection"""
- return connection(intern.connect(dsn))
+ return Connection(intern.connect(dsn))
|
renamed class connection to Connection
|
py
|
diff --git a/src/python/grpcio/grpc/experimental/aio/_call.py b/src/python/grpcio/grpc/experimental/aio/_call.py
index <HASH>..<HASH> 100644
--- a/src/python/grpcio/grpc/experimental/aio/_call.py
+++ b/src/python/grpcio/grpc/experimental/aio/_call.py
@@ -15,7 +15,7 @@
import asyncio
from functools import partial
-from typing import AsyncIterable, List, Dict, Optional
+from typing import AsyncIterable, Dict, Optional
import grpc
from grpc import _common
|
Fix pylint issue
|
py
|
diff --git a/dipper/sources/ClinVarXML_alpha.py b/dipper/sources/ClinVarXML_alpha.py
index <HASH>..<HASH> 100755
--- a/dipper/sources/ClinVarXML_alpha.py
+++ b/dipper/sources/ClinVarXML_alpha.py
@@ -619,6 +619,12 @@ with gzip.open(FILENAME, 'rt') as fh:
_evidence_id,
'SEPIO:0000124',
'PMID:' + scv_citation_id)
+ # <:monarch_assoc><dc:source><PMID:scv_citation_id>
+ write_spo(
+ monarch_assoc,
+ 'dc:source',
+ 'PMID:' + scv_citation_id)
+
# <PMID:scv_citation_id><rdf:type><IAO:0000013>
write_spo(
'PMID:' + scv_citation_id,
|
also attach source to association per <I> cmap
|
py
|
diff --git a/tests/graphcut_/__init__.py b/tests/graphcut_/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/graphcut_/__init__.py
+++ b/tests/graphcut_/__init__.py
@@ -1,4 +1,3 @@
-# !TODO: Update these imports
from cut import TestCut
from graph import TestGraph
from generate import TestGenerate
|
Removed obsolete todo comment.
|
py
|
diff --git a/src/fonduer/utils/data_model_utils/structural.py b/src/fonduer/utils/data_model_utils/structural.py
index <HASH>..<HASH> 100644
--- a/src/fonduer/utils/data_model_utils/structural.py
+++ b/src/fonduer/utils/data_model_utils/structural.py
@@ -40,7 +40,7 @@ def get_attributes(mention):
return span.sentence.html_attrs
[email protected]_cache(maxsize=256)
[email protected]_cache(maxsize=16)
def _get_etree_for_text(text):
return etree.ElementTree(fromstring(text))
|
Lowered caching max size, considering candidates are ordered by document during featurization and multiprocess parallelization
|
py
|
diff --git a/dojo/anomaly/algorithms.py b/dojo/anomaly/algorithms.py
index <HASH>..<HASH> 100644
--- a/dojo/anomaly/algorithms.py
+++ b/dojo/anomaly/algorithms.py
@@ -19,7 +19,8 @@ class GaussianDist(BaseModel):
Parameters:
-----------
- None
+ multi : boolean, whether to model Multivariate Gaussian Distribution
+ or a univariate one
"""
|
[Update-Doc] Cover multi gaussian with __doc__
|
py
|
diff --git a/prospector/profiles/profile.py b/prospector/profiles/profile.py
index <HASH>..<HASH> 100644
--- a/prospector/profiles/profile.py
+++ b/prospector/profiles/profile.py
@@ -81,7 +81,7 @@ def parse_profile(name, contents):
if name.endswith('.yaml'):
# this was a full path
name = os.path.splitext(os.path.basename(name))[0]
- data = yaml.load(contents)
+ data = yaml.safe_load(contents)
if data is None:
# this happens if a completely empty YAML file is passed in to
# parse_profile, for example
|
Updating to use yaml.safe_load for profiles (just in case)
|
py
|
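The motivation for safe_load is that yaml.load with the default loader can construct arbitrary Python objects from tags such as !!python/object, while safe_load only builds plain types. A small sketch (the profile keys shown are illustrative, not taken from prospector):

    import yaml

    contents = "strictness: veryhigh\npylint:\n  run: true\n"
    data = yaml.safe_load(contents)
    assert data == {'strictness': 'veryhigh', 'pylint': {'run': True}}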
diff --git a/asphalt/core/context.py b/asphalt/core/context.py
index <HASH>..<HASH> 100644
--- a/asphalt/core/context.py
+++ b/asphalt/core/context.py
@@ -117,7 +117,7 @@ class Context(EventSource):
call arguments
"""
- def __init__(self, parent: 'Context'=None, default_timeout: int=10):
+ def __init__(self, parent: 'Context'=None, default_timeout: int=5):
super().__init__()
self._register_topics({
'finished': ContextFinishEvent,
|
Changed the default timeout for resource requests to 5 seconds
|
py
|
diff --git a/bf/image.py b/bf/image.py
index <HASH>..<HASH> 100644
--- a/bf/image.py
+++ b/bf/image.py
@@ -81,3 +81,5 @@ class Image(File):
if o!='': print(o)
if DEBUG==True: print('\n', outfn, '\n ', w, h, w*h, maxpixels, downratio, geom_arg)
+ return outfn
+
|
nice to know where the image is going
|
py
|
diff --git a/py/error.py b/py/error.py
index <HASH>..<HASH> 100644
--- a/py/error.py
+++ b/py/error.py
@@ -74,7 +74,7 @@ class ErrorMaker(object):
cls = self._geterrnoclass(errno)
else:
try:
- cls = self._geterrnoclass(_winerrnomap[eno])
+ cls = self._geterrnoclass(_winerrnomap[errno])
except KeyError:
raise value
raise cls("%s%r" % (func.__name__, args))
|
Fixed a typo in error.py causing it to fail on Windows. --HG-- branch : trunk
|
py
|
diff --git a/grimoire_elk/ocean/github.py b/grimoire_elk/ocean/github.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/ocean/github.py
+++ b/grimoire_elk/ocean/github.py
@@ -39,29 +39,29 @@ class Mapping(BaseMapping):
if es_major != '2':
mapping = '''
- {
- "dynamic":true,
+ {
+ "dynamic":true,
+ "properties": {
+ "data": {
"properties": {
- "data": {
+ "comments_data": {
+ "dynamic":false,
"properties": {
- "comments_data": {
- "dynamic":false,
- "properties": {
- "body": {
- "type": "text",
- "index": true
- }
- }
- },
"body": {
"type": "text",
"index": true
}
}
+ },
+ "body": {
+ "type": "text",
+ "index": true
}
}
+ }
}
- '''
+ }
+ '''
else:
mapping = '''
{
|
[ocean] Fix ES <I>.x mappings for GitHub ocean backend. This patch fixes the mappings for GitHub, which now also work with ES <I>.x.
|
py
|
diff --git a/jaraco/filesystem/__init__.py b/jaraco/filesystem/__init__.py
index <HASH>..<HASH> 100644
--- a/jaraco/filesystem/__init__.py
+++ b/jaraco/filesystem/__init__.py
@@ -11,7 +11,12 @@ __version__ = '$Rev$'[6:-2]
__svnauthor__ = '$Author$'[9:-2]
__date__ = '$Date$'[7:-2]
-import os, itertools
+import os
+import itertools
+import calendar
+import logging
+
+log = logging.getLogger(__name__)
def GetUniquePathname(path, root = ''):
"""Return a pathname possibly with a number appended to it so that it is
@@ -35,4 +40,13 @@ def __splitext__(filepath):
if os.path.isdir(filepath):
return filepath, ''
else:
- return os.path.splitext(filepath)
\ No newline at end of file
+ return os.path.splitext(filepath)
+
+def set_time(filename, mod_time):
+ """
+ Set the modified time of a file
+ """
+ log.debug('Setting modified time to %s', mod_time)
+ mtime = calendar.timegm(mod_time.utctimetuple())
+ atime = os.stat(filename).st_atime
+ os.utime(filename, (atime, mtime))
|
Added set_time to update the time of a file
|
py
|
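A usage sketch for the new helper, assuming set_time is in scope and that the file exists; the filename and timestamp are arbitrary, and calendar.timegm(mod_time.utctimetuple()) means the datetime is interpreted as UTC:

    import datetime

    # sets only the modified time; the access time is preserved from os.stat
    set_time('example.txt', datetime.datetime(2020, 1, 1, 12, 0, 0))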
diff --git a/patroni/postgresql.py b/patroni/postgresql.py
index <HASH>..<HASH> 100644
--- a/patroni/postgresql.py
+++ b/patroni/postgresql.py
@@ -1733,7 +1733,7 @@ $$""".format(name, ' '.join(options)), name, password, password)
if sync_state == 'potential' and app_name == current:
# Prefer current even if not the best one any more to avoid indecisivness and spurious swaps.
return current, False
- if sync_state == 'async':
+ if sync_state in ('async', 'potential'):
candidates.append(app_name)
if candidates:
|
Search new sync candidate among potential and async standbys (#<I>). In synchronous_mode_strict we put '*' into synchronous_standby_names, which makes one connection 'sync' and the other connections 'potential'. The code picking the correct sync standby didn't consider 'potential' as a good candidate. Fixes: <URL>
|
py
|
diff --git a/pmagpy/pmag.py b/pmagpy/pmag.py
index <HASH>..<HASH> 100755
--- a/pmagpy/pmag.py
+++ b/pmagpy/pmag.py
@@ -8766,7 +8766,14 @@ def measurements_methods3(meas_data, noave):
NewSpecs[0]["software_packages"] = version_num
# just copy over the single record as is
SpecOuts.append(NewSpecs[0])
- return SpecOuts
+ # added to get rid of colons in experiment names
+ SpecOutsNoColons=[]
+ for rec in SpecOuts:
+ experiment=rec['experiment']
+ newex=experiment.replace(':','_')
+ rec['experiment']=newex
+ SpecOutsNoColons.append(rec)
+ return SpecOutsNoColons
def mw_measurements_methods(MagRecs):
@@ -11280,3 +11287,4 @@ def vocab_convert(vocab, standard, key=''):
def main():
print("Full PmagPy documentation is available at: https://earthref.org/PmagPy/cookbook/")
+
|
modified: ../../../pmagpy/pmag.py => replaced colons with _ in measurements_magic3
|
py
|
diff --git a/bibliopixel/util/log.py b/bibliopixel/util/log.py
index <HASH>..<HASH> 100644
--- a/bibliopixel/util/log.py
+++ b/bibliopixel/util/log.py
@@ -6,6 +6,9 @@ LOG_NAMES = {'frame': FRAME, 'debug': DEBUG, 'info': INFO, 'warning': WARNING,
'error': ERROR}
SORTED_NAMES = tuple(k for k, v in sorted(LOG_NAMES.items()))
+VERBOSE_FMT = '\
+%(asctime)s,%(msecs)d %(levelname)-7s [%(filename)s:%(lineno)d] %(message)s'
+DATE_FMT = '%d-%m-%Y:%H:%M:%S'
def add_arguments(parser):
@@ -18,7 +21,8 @@ def add_arguments(parser):
def apply_args(args):
if args.verbose and args.loglevel != 'frame':
- set_log_level('debug')
+ logging.basicConfig(
+ format=VERBOSE_FMT, datefmt=DATE_FMT, level=logging.DEBUG)
else:
set_log_level(args.loglevel)
|
Make verbose mode more complete
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-fontawesome',
- version='0.2.3',
+ version='0.2.4',
packages=['fontawesome'],
include_package_data=True,
license='BSD License',
|
version bump for pypi, which apparently no longer accept package deletion
|
py
|
diff --git a/docs/source/conf.py b/docs/source/conf.py
index <HASH>..<HASH> 100644
--- a/docs/source/conf.py
+++ b/docs/source/conf.py
@@ -198,3 +198,5 @@ intersphinx_mapping = {
"numpy": ("https://docs.scipy.org/doc/numpy", None),
"configobj": ("https://configobj.readthedocs.io/en/latest", None),
}
+
+linkcheck_ignore = ['https://github.com/psf/black.*']
|
This link is not broken, but sphinx thinks it is.
|
py
|
diff --git a/intake/catalog/base.py b/intake/catalog/base.py
index <HASH>..<HASH> 100644
--- a/intake/catalog/base.py
+++ b/intake/catalog/base.py
@@ -185,7 +185,7 @@ class Catalog(object):
return iter(self.get_catalogs()) if self._children else iter(self.get_entries())
def __dir__(self):
- return self.get_catalogs() or self.get_entries()
+ return self.get_catalogs() if self._children else self.get_entries()
def __getattr__(self, item):
return self.get_catalog(item) if self._children else self.get_entry(item)
|
Fix autocompletion of catalogs/entries
|
py
|
diff --git a/pulsar/net/client/std.py b/pulsar/net/client/std.py
index <HASH>..<HASH> 100644
--- a/pulsar/net/client/std.py
+++ b/pulsar/net/client/std.py
@@ -62,6 +62,7 @@ class HttpClientResponse(object):
status_code = None
url = None
HTTPError = HTTPError
+ URLError = URLError
def __str__(self):
if self.status_code:
@@ -115,6 +116,7 @@ class ResponseStd(HttpClientResponse):
class HttpClientHandler(object):
'''Http client handler.'''
+ HTTPError = HTTPError
URLError = URLError
DEFAULT_HEADERS = [('User-agent', pulsar.SERVER_SOFTWARE),
('Connection', 'Keep-Alive')]
|
added error classes to http clients
|
py
|
diff --git a/git/test/test_remote.py b/git/test/test_remote.py
index <HASH>..<HASH> 100644
--- a/git/test/test_remote.py
+++ b/git/test/test_remote.py
@@ -445,6 +445,19 @@ class TestRemote(TestBase):
origin = rw_repo.remote('origin')
assert origin == rw_repo.remotes.origin
+ # Verify we can handle prunes when fetching
+ # stderr lines look like this: x [deleted] (none) -> origin/experiment-2012
+ # These should just be skipped
+ num_deleted = False
+ for branch in remote_repo.heads:
+ if branch.name != 'master':
+ branch.delete(remote_repo, branch, force=True)
+ num_deleted += 1
+ # end
+ # end for each branch
+ assert num_deleted > 0
+ assert len(rw_repo.remotes.origin.fetch(prune=True)) == 1, "deleted everything but master"
+
@with_rw_repo('HEAD', bare=True)
def test_creation_and_removal(self, bare_rw_repo):
new_name = "test_new_one"
|
Added test to verify we can handle fetch prunes. They are just skipped. Fixes #<I>
|
py
|
diff --git a/opentrons_sdk/drivers/motor.py b/opentrons_sdk/drivers/motor.py
index <HASH>..<HASH> 100644
--- a/opentrons_sdk/drivers/motor.py
+++ b/opentrons_sdk/drivers/motor.py
@@ -7,6 +7,11 @@ import serial
from opentrons_sdk.util import log
+JSON_ERROR = None
+if sys.version_info > (3, 4):
+ JSON_ERROR = ValueError
+else:
+ JSON_ERROR = json.decoder.JSONDecodeError
class GCodeLogger():
@@ -344,7 +349,9 @@ class CNCDriver(object):
coords['target'][letter] = response_dict.get(letter.upper(),0)
- except (ValueError, json.decoder.JSONDecodeError) as e:
+ # TODO (andy): travis-ci is testing on both 3.4 and 3.5
+ # JSONDecodeError does not exist in 3.4 so the build breaks here
+ except JSON_ERROR as e:
log.debug("Serial", "Error parsing JSON string:")
log.debug("Serial", res)
|
json throws a different error depending on the python version
|
py
|
diff --git a/tweepy/streaming.py b/tweepy/streaming.py
index <HASH>..<HASH> 100644
--- a/tweepy/streaming.py
+++ b/tweepy/streaming.py
@@ -223,7 +223,8 @@ class Stream(object):
# read the next twitter status object
if delimited_string.strip().isdigit():
next_status_obj = resp.read( int(delimited_string) )
- self._data(next_status_obj)
+ if self.running:
+ self._data(next_status_obj)
if resp.isclosed():
self.on_closed(resp)
|
Only call _data if stream.running=True. Not sure why an extra delimiter int is spit out when you disconnect, but this should be enough of a bandaid.
|
py
|
diff --git a/tests/__init__.py b/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -1,5 +1,8 @@
+import os
from fake_webapp import start_server, stop_server
+TESTS_ROOT = os.path.abspath(os.path.dirname(__file__))
+
def setup():
start_server()
|
Created a helper variable which stores the root path of the tests
|
py
|
diff --git a/galpy/potential_src/TransientLogSpiralPotential.py b/galpy/potential_src/TransientLogSpiralPotential.py
index <HASH>..<HASH> 100644
--- a/galpy/potential_src/TransientLogSpiralPotential.py
+++ b/galpy/potential_src/TransientLogSpiralPotential.py
@@ -7,11 +7,15 @@ _degtorad= math.pi/180.
class TransientLogSpiralPotential(planarPotential):
"""Class that implements a steady-state spiral potential
- V(r,phi,t) = A(t)/alpha cos(alpha ln(r) - m(phi - Omegas*t-gamma))
+ .. math::
+
+ \\Phi(R,\\phi) = \\frac{\\mathrm{amp}(t)}{\\alpha}\\,\\cos\\left(\\alpha\,\ln R - m\\,(\\phi-\\Omega_s\\,t-\\gamma)\\right)
where
- A(t) = A_max exp(- [t-to]^2/sigma^2/2.)
+ .. math::
+
+ \\mathrm{amp}(t) = \\mathrm{amp}\\,\\times A\\,\\exp\\left(-\\frac{[t-t_0]^2}{2\\,\\sigma^2}\\right)
"""
def __init__(self,amp=1.,omegas=0.65,A=-0.035,
|
add pretty potential for TransientLogSpiralPotential, #<I>
|
py
|
diff --git a/ratings/models.py b/ratings/models.py
index <HASH>..<HASH> 100644
--- a/ratings/models.py
+++ b/ratings/models.py
@@ -5,7 +5,7 @@ from django.db import models, IntegrityError
from django.template.defaultfilters import slugify
from django.utils.hashcompat import sha_constructor
-from ratings.utils import get_content_object_field, is_gfk
+from ratings.utils import get_content_object_field, is_gfk, recommended_items
class RatedItemBase(models.Model):
score = models.FloatField(default=0, db_index=True)
@@ -197,6 +197,9 @@ class _RatingsDescriptor(object):
def similar_items(self, item):
return SimilarItem.objects.get_for_item(item)
+ def recommended_items(self, user):
+ return recommended_items(self.all(), user)
+
def order_by_rating(self, aggregator=models.Sum, descending=True):
ordering = descending and '-score' or 'score'
related_field = self.get_content_object_field()
|
Adding recommended items method to the RatingsDescriptor
|
py
|
diff --git a/azure-storage-blob/azure/storage/blob/_upload_chunking.py b/azure-storage-blob/azure/storage/blob/_upload_chunking.py
index <HASH>..<HASH> 100644
--- a/azure-storage-blob/azure/storage/blob/_upload_chunking.py
+++ b/azure-storage-blob/azure/storage/blob/_upload_chunking.py
@@ -290,17 +290,12 @@ class _BlockBlobChunkUploader(_BlobChunkUploader):
class _PageBlobChunkUploader(_BlobChunkUploader):
- EMPTY_PAGE = bytearray(512)
-
def _is_chunk_empty(self, chunk_data):
- # read until non-zero data is encountered
+ # read until non-zero byte is encountered
# if reached the end without returning, then chunk_data is all 0's
- data = BytesIO(chunk_data)
- page = data.read(512)
- while page != b'':
- if page != self.EMPTY_PAGE:
+ for each_byte in chunk_data:
+ if each_byte != 0:
return False
- page = data.read(512)
return True
def _upload_chunk(self, chunk_start, chunk_data):
|
Minor improvement to page blob upload optimization
|
py
|
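The rewritten check relies on Python 3 semantics, where iterating a bytes or bytearray object yields ints. An equivalent stand-alone sketch of the same idea:

    def is_chunk_empty(chunk_data: bytes) -> bool:
        # True only if every byte is zero, i.e. the page can be skipped
        return all(b == 0 for b in chunk_data)

    assert is_chunk_empty(bytearray(512))
    assert not is_chunk_empty(b'\x00' * 511 + b'\x01')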
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -36,4 +36,9 @@ setup(
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
),
+ project_urls={
+ 'Homepage': 'http://taskcluster.github.io/slugid.py',
+ 'Source': 'https://github.com/taskcluster/slugid.py',
+ },
+
)
|
Add Homepage and Source project URLs for PyPI
|
py
|
diff --git a/src/VertexPartition.py b/src/VertexPartition.py
index <HASH>..<HASH> 100644
--- a/src/VertexPartition.py
+++ b/src/VertexPartition.py
@@ -977,7 +977,7 @@ class CPMVertexPartition(LinearResolutionParameterVertexPartition):
types = list(types)
if set(types) != set([0, 1]):
- new_type = ig.UniqueIdGenerator()
+ new_type = _ig.UniqueIdGenerator()
types = [new_type[t] for t in types]
if set(types) != set([0, 1]):
|
Fixed bug in referencing igraph
|
py
|
diff --git a/tensor2tensor/rl/collect.py b/tensor2tensor/rl/collect.py
index <HASH>..<HASH> 100644
--- a/tensor2tensor/rl/collect.py
+++ b/tensor2tensor/rl/collect.py
@@ -264,6 +264,9 @@ def define_collect(hparams, scope, eval_phase,
new_memory = []
if hasattr(hparams, "effective_num_agents"):
effective_num_agents = hparams.effective_num_agents
+ assert hparams.epoch_length % effective_num_agents == 0, \
+ "The rollout of hparams.epoch_length will be distributed amongst" \
+ "effective_num_agents of agents"
new_epoch_length = int(hparams.epoch_length / effective_num_agents)
for mem, info in zip(memory, rollout_metadata):
shape, _, name = info
|
assert explaining effective_num_agents
|
py
|
diff --git a/advanced_descriptors/__init__.py b/advanced_descriptors/__init__.py
index <HASH>..<HASH> 100644
--- a/advanced_descriptors/__init__.py
+++ b/advanced_descriptors/__init__.py
@@ -18,7 +18,7 @@ from .advanced_property import AdvancedProperty
__all__ = ("SeparateClassMethod", "AdvancedProperty")
-__version__ = "1.0.6"
+__version__ = "2.0.0"
__author__ = "Alexey Stepanov"
__author_email__ = "[email protected]"
__maintainers__ = {
|
Bump to <I> (#<I>). Python <I>+ only. Next the py<I> branch will be updated.
|
py
|
diff --git a/lazysignup/views.py b/lazysignup/views.py
index <HASH>..<HASH> 100644
--- a/lazysignup/views.py
+++ b/lazysignup/views.py
@@ -55,14 +55,15 @@ def convert(request, form_class=UserCreationForm,
return HttpResponse()
else:
return redirect(redirect_to)
+
+ # Invalid form, now check to see if is an ajax call
+ # TODO: Use JsonResponse
+ if request.is_ajax():
+ return HttpResponseBadRequest(content=str(form.errors))
else:
form = form_class()
- if request.is_ajax():
- return HttpResponseBadRequest(content=str(form.errors))
- else:
- return direct_to_template(
- request,
- 'lazysignup/convert.html',
- {'form': form, 'redirect_to': redirect_to},
- )
+ return direct_to_template(request, 'lazysignup/convert.html',{
+ 'form': form,
+ 'redirect_to': redirect_to
+ },)
|
Updating convert view so it doesn't return an HttpResponseBadRequest when it is a GET request, regardless of whether or not it's an AJAX call.
|
py
|
diff --git a/master/buildbot/test/unit/test_util_service.py b/master/buildbot/test/unit/test_util_service.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/test/unit/test_util_service.py
+++ b/master/buildbot/test/unit/test_util_service.py
@@ -112,7 +112,7 @@ class ClusteredService(unittest.TestCase):
def test_create_HasNoServiceIdYet(self):
# has no service id at first
- self.assertIsNone(self.svc.serviceid)
+ self.assertIdentical(self.svc.serviceid, None)
def test_start_UnclaimableSoNotActiveYet(self):
self.svc.startService()
|
don't use assertIsNone, which doesn't exist in py<I>
|
py
|
diff --git a/tests/xmi/test_xmi_deserialization.py b/tests/xmi/test_xmi_deserialization.py
index <HASH>..<HASH> 100644
--- a/tests/xmi/test_xmi_deserialization.py
+++ b/tests/xmi/test_xmi_deserialization.py
@@ -5,6 +5,8 @@ from pyecore.resources.xmi import XMIResource
from pyecore.resources.resource import HttpURI
from os import path
+
+# Modified to it does not need an internet connection anymore
def test_uri_http():
uri = HttpURI('https://api.genmymodel.com/projects/_L0eC8P1oEeW9zv77lynsJg/xmi')
assert uri.plain == 'https://api.genmymodel.com/projects/_L0eC8P1oEeW9zv77lynsJg/xmi'
@@ -13,8 +15,8 @@ def test_uri_http():
assert len(uri.segments) == 4
assert uri.last_segment == 'xmi'
assert uri.segments[0] == 'api.genmymodel.com'
- flike = uri.create_instream()
- assert flike.getcode() == 200
+ # flike = uri.create_instream()
+ # assert flike.getcode() == 200
with pytest.raises(NotImplementedError):
uri.create_outstream()
|
Remove the need for an internet connection for one test. The test about HttpURI was also testing the 'in' and 'out' stream creation by reading a model directly from the GenMyModel API. Even if this test is relevant, the test implementation is very restrictive as it forces the user to have an internet connection to be able to run it.
|
py
|
diff --git a/abydos/distance.py b/abydos/distance.py
index <HASH>..<HASH> 100644
--- a/abydos/distance.py
+++ b/abydos/distance.py
@@ -326,8 +326,8 @@ be greater than or equal to 0.')
if src == tar:
return 1.0
- elif len(src) == 0 and len(tar) == 0:
- return 1.0
+ elif len(src) == 0 or len(tar) == 0:
+ return 0.0
q_src = QGrams(src, qval)
q_tar = QGrams(tar, qval)
@@ -335,6 +335,9 @@ be greater than or equal to 0.')
q_tar_mag = q_tar.count()
q_intersection_mag = sum((q_src & q_tar).values())
+ if len(q_src) == 0 or len(q_tar) == 0:
+ return 0.0
+
if bias is None:
return q_intersection_mag / (q_intersection_mag + alpha *
(q_src_mag - q_intersection_mag)
@@ -380,7 +383,7 @@ def sim_dice(src, tar, qval=2):
For two sets X and Y, the Sørensen–Dice coefficient is
S(X,Y) = 2 * |X∩Y| / (|X| + |Y|)
This is identical to the Tanimoto similarity coefficient
- and the Tversky index for α = β = 1
+ and the Tversky index for α = β = 0.5
"""
return sim_tversky(src, tar, qval, 0.5, 0.5)
|
fixed some formulae to better deal with short terms
|
py
|
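For context on the corrected comment: the Tversky index that sim_dice delegates to is T(X,Y) = |X∩Y| / (|X∩Y| + α|X\Y| + β|Y\X|). With α = β = 0.5 the denominator becomes |X∩Y| + (|X| + |Y| − 2|X∩Y|)/2 = (|X| + |Y|)/2, which recovers the Sørensen–Dice coefficient 2|X∩Y| / (|X| + |Y|); α = β = 1 would instead give the Jaccard (Tanimoto) coefficient.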
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -45,7 +45,7 @@ http://pythonhosted.org/pypng/
'Topic :: Multimedia :: Graphics',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.3',
+ 'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
|
Change to Python <I> in metadata
|
py
|
diff --git a/bcbio/variation/gatk.py b/bcbio/variation/gatk.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/gatk.py
+++ b/bcbio/variation/gatk.py
@@ -90,12 +90,14 @@ def haplotype_caller(align_bams, items, ref_file, assoc_files,
with file_transaction(items[0], out_file) as tx_out_file:
params += ["-T", "HaplotypeCaller",
"-o", tx_out_file,
- "-ploidy", str(ploidy.get_ploidy(items, region)),
"--annotation", "ClippingRankSumTest",
"--annotation", "DepthPerSampleHC"]
# Enable hardware based optimizations in GATK 3.1+
if LooseVersion(broad_runner.gatk_major_version()) >= LooseVersion("3.1"):
params += ["--pair_hmm_implementation", "VECTOR_LOGLESS_CACHING"]
+ # Enable non-diploid calling in GATK 3.3+
+ if LooseVersion(broad_runner.gatk_major_version()) >= LooseVersion("3.3"):
+ params += ["-ploidy", str(ploidy.get_ploidy(items, region))]
if _joint_calling(items): # Prepare gVCFs if doing joint calling
params += ["--emitRefConfidence", "GVCF", "--variant_index_type", "LINEAR",
"--variant_index_parameter", "128000"]
|
Avoid passing ploidy unless using GATK <I> which supports ploidy-based calling. Thanks to Severine Catreux
|
py
|
diff --git a/plash/core.py b/plash/core.py
index <HASH>..<HASH> 100644
--- a/plash/core.py
+++ b/plash/core.py
@@ -291,17 +291,24 @@ def execute(
os.utime(join(layer), None)
if build_only:
- print('*** plash: Build is ready')
+ print('*** plash: Build is ready at: {}'.format(layers[-1]))
else:
mountpoint = mount_layers([join(i, 'payload') for i in layers], mkdtemp(dir=TMP_DIR))
+ os.chmod(mountpoint, 0o755) # that permission the root directory '/' needs
+ # subprocess.Popen(['chmod', '755', mountpoint])
last_layer = layers[-1]
# os.utime(join(last_layer, 'lastused'), times=None) # update the timestamp on this
prepare_rootfs(mountpoint)
- os.chmod(mountpoint, 755)
os.chroot(mountpoint)
os.chdir('/')
+
+ uid = os.environ.get('SUDO_UID')
+ if uid:
+ os.setgid(int(os.environ['SUDO_GID']))
+ os.setuid(int(uid))
+
with friendly_exception([FileNotFoundError]):
os.execvpe(command[0], command, extra_envs)
|
experimental setgid and setuid to user that called it
|
py
|
diff --git a/faker/providers/lorem/zh_TW/__init__.py b/faker/providers/lorem/zh_TW/__init__.py
index <HASH>..<HASH> 100644
--- a/faker/providers/lorem/zh_TW/__init__.py
+++ b/faker/providers/lorem/zh_TW/__init__.py
@@ -1,4 +1,4 @@
-import dragonmapper.# -*- coding: utf-8 -*-
+# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .. import Provider as LoremProvider
|
Removed line that was causing the build to fail.
|
py
|
diff --git a/plenum/test/test_node_connection.py b/plenum/test/test_node_connection.py
index <HASH>..<HASH> 100644
--- a/plenum/test/test_node_connection.py
+++ b/plenum/test/test_node_connection.py
@@ -48,6 +48,7 @@ def tdirAndLooper(nodeReg):
yield td, looper
[email protected](reason='INDY-109. Intermittent failures')
def testNodesConnectsWhenOneNodeIsLate(allPluginsPath, tdirAndLooper,
nodeReg):
tdir, looper = tdirAndLooper
|
INDY-<I> (#<I>)
|
py
|
diff --git a/simulation.py b/simulation.py
index <HASH>..<HASH> 100644
--- a/simulation.py
+++ b/simulation.py
@@ -489,4 +489,7 @@ def run(models, years=None):
for model_name in models:
print('Running model {}'.format(model_name))
model = get_model(model_name)
+ import time
+ t1 = time.time()
model(year=year)
+ print("Time to execute model = %.3fs" % (time.time()-t1))
|
changes to yamlmodelrunner for new dataset style
|
py
|
diff --git a/fireplace/cards/blackrock/adventure.py b/fireplace/cards/blackrock/adventure.py
index <HASH>..<HASH> 100644
--- a/fireplace/cards/blackrock/adventure.py
+++ b/fireplace/cards/blackrock/adventure.py
@@ -289,8 +289,9 @@ class BRMA04_3H:
# Chromatic Dragonkin
class BRMA12_8t:
- # That ID is... correct. What?
- events = Play(OPPONENT, SPELL).on(Buff(SELF, "GVG_100e"))
+ events = Play(OPPONENT, SPELL).on(Buff(SELF, "BRMA12_8te"))
+
+BRMA12_8te = buff(+2, +2)
# Son of the Flame
|
Fix Chromatic Dragonkin buff added in <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -37,12 +37,14 @@ setup(
'drizzlepac': [
'pars/*',
'pars/hap_pars/*',
+ 'pars/hap_pars/mvm_parameters/*',
'pars/hap_pars/mvm_parameters/acs/hrc/*',
'pars/hap_pars/mvm_parameters/acs/sbc/*',
'pars/hap_pars/mvm_parameters/acs/wfc/*',
'pars/hap_pars/mvm_parameters/any/*',
'pars/hap_pars/mvm_parameters/wfc3/ir/*',
'pars/hap_pars/mvm_parameters/wfc3/uvis/*',
+ 'pars/hap_pars/svm_parameters/*',
'pars/hap_pars/svm_parameters/acs/hrc/*',
'pars/hap_pars/svm_parameters/acs/sbc/*',
'pars/hap_pars/svm_parameters/acs/wfc/*',
|
Fix setup.py to include json files which reside at the same level as [mvm|svm]_parameters. (#<I>)
|
py
|
diff --git a/raiden/app.py b/raiden/app.py
index <HASH>..<HASH> 100644
--- a/raiden/app.py
+++ b/raiden/app.py
@@ -142,6 +142,9 @@ def app(address,
config['port'] = listen_port
accmgr = AccountManager(keystore_path)
+ if not accmgr.accounts:
+ raise RuntimeError('No Ethereum accounts found in the user\'s system')
+
if not accmgr.address_in_keystore(address):
addresses = list(accmgr.accounts.keys())
formatted_addresses = [
|
Raise error if no account is found in the system
|
py
|
diff --git a/py/vttest/mysql_flavor.py b/py/vttest/mysql_flavor.py
index <HASH>..<HASH> 100644
--- a/py/vttest/mysql_flavor.py
+++ b/py/vttest/mysql_flavor.py
@@ -55,7 +55,7 @@ class MySQL56(MysqlFlavor):
def my_cnf(self):
files = [
os.path.join(vttop, "config/mycnf/default-fast.cnf"),
- os.path.join(vttop + "config/mycnf/master_mysql56.cnf"),
+ os.path.join(vttop, "config/mycnf/master_mysql56.cnf"),
]
return ":".join(files)
|
vttest: Fix MySQL<I> cnf file path.
|
py
|
diff --git a/minion/request.py b/minion/request.py
index <HASH>..<HASH> 100644
--- a/minion/request.py
+++ b/minion/request.py
@@ -49,7 +49,7 @@ class Manager(object):
"""
def __init__(self):
- self.requests = {}
+ self._requests = {}
def after_response(self, request, fn, *args, **kwargs):
"""
@@ -64,13 +64,13 @@ class Manager(object):
"""
- self.requests[request]["callbacks"].append((fn, args, kwargs))
+ self._requests[id(request)]["callbacks"].append((fn, args, kwargs))
def request_started(self, request):
- self.requests[request] = {"callbacks": [], "assets": {}}
+ self._requests[id(request)] = {"callbacks": [], "assets": {}}
def request_served(self, request, response):
- request_data = self.requests.pop(request)
+ request_data = self._requests.pop(id(request))
for callback, args, kwargs in request_data["callbacks"]:
callback_response = callback(response, *args, **kwargs)
if callback_response is not None:
|
Manager will change soon, but until then, avoid Python's annoying lack of frozendict by being lazy.
|
py
|
diff --git a/monasca_common/kafka/producer.py b/monasca_common/kafka/producer.py
index <HASH>..<HASH> 100644
--- a/monasca_common/kafka/producer.py
+++ b/monasca_common/kafka/producer.py
@@ -40,22 +40,13 @@ class KafkaProducer(object):
"""Takes messages and puts them on the supplied kafka topic
"""
- # Using a key producer to make sure we can distribute messages evenly
- # across all partitions. In the kafka-python library, as of version
- # 0.9.2, it doesn't support sending message batches for keyed
- # producers. Batching writes to kafka is important for performance so
- # we have to work around this limitation. Using the _next_partition
- # function allows us to get proper distribution and the speed of the
- # send_messages function.
-
if not isinstance(messages, list):
messages = [messages]
try:
if key is None:
- key = time.time() * 1000
- partition = self._producer._next_partition(topic, key)
- self._producer.send_messages(topic, partition, *messages)
+ key = int(time.time() * 1000)
+ self._producer.send_messages(topic, str(key), *messages)
except Exception:
log.exception('Error publishing to {} topic.'.format(topic))
raise
|
Convert partition key to str In kafka <I> the partition key needs to be a six.binary_type which is a str in Python <I> Change-Id: I<I>d<I>d<I>bfdafc<I>b<I>f3dca<I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
setup(
name='sheetsu',
- version='0.0.4',
+ version='0.0.5',
description='Sheetsu Python client',
url='http://github.com/andreffs18/sheetsu-python',
author='Andre Silva',
@@ -10,5 +10,6 @@ setup(
license='MIT',
keywords='sheetsu api client sdk spreadsheet',
packages=find_packages(),
- zip_safe=False
+ zip_safe=False,
+ install_requires=['requests']
)
|
Add required packages for lib to work and update version
|
py
|
diff --git a/openquake/calculators/tests/event_based_risk_test.py b/openquake/calculators/tests/event_based_risk_test.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/event_based_risk_test.py
+++ b/openquake/calculators/tests/event_based_risk_test.py
@@ -162,9 +162,10 @@ class EventBasedRiskTestCase(CalculatorTestCase):
def test_case_4(self):
# Turkey with SHARE logic tree
self.run_calc(case_4.__file__, 'job.ini')
+ if os.environ.get('TRAVIS'):
+ raise unittest.SkipTest
[fname] = export(('avg_losses-stats', 'csv'), self.calc.datastore)
- self.assertEqualFiles('expected/avg_losses-mean.csv',
- fname, delta=1E-5)
+ self.assertEqualFiles('expected/avg_losses-mean.csv', fname)
fnames = export(('agg_loss_table', 'csv'), self.calc.datastore)
assert fnames, 'No agg_losses exported??'
|
Skipped test on travis [skip hazardlib]
|
py
|
diff --git a/mamba/example.py b/mamba/example.py
index <HASH>..<HASH> 100644
--- a/mamba/example.py
+++ b/mamba/example.py
@@ -43,7 +43,7 @@ class Example(object):
def _finish(self, reporter):
self._elapsed_time = datetime.utcnow() - self._begin
- if not self.error:
+ if not self.failed:
reporter.example_passed(self)
else:
reporter.example_failed(self)
|
Use failed property for checking spec failure
|
py
|
diff --git a/nanofilt/NanoFilt.py b/nanofilt/NanoFilt.py
index <HASH>..<HASH> 100644
--- a/nanofilt/NanoFilt.py
+++ b/nanofilt/NanoFilt.py
@@ -177,7 +177,6 @@ def filter_stream(fq, args):
Record has to be longer than args.length (default 1) after trimming
Use a faster silent quality_check if no filtering on quality is required
"""
- sys.stderr.write(str(args.quality))
if args.quality:
quality_check = ave_qual
else:
|
removing unnecessary stderr print; no idea why that one was there
|
py
|
diff --git a/termtool.py b/termtool.py
index <HASH>..<HASH> 100644
--- a/termtool.py
+++ b/termtool.py
@@ -105,3 +105,6 @@ class Termtool(object):
return 1
return 0
+
+ def run(self):
+ sys.exit(self.main(sys.argv[1:]))
|
Add a run() for script running boilerplate
|
py
|
diff --git a/glooey/__init__.py b/glooey/__init__.py
index <HASH>..<HASH> 100644
--- a/glooey/__init__.py
+++ b/glooey/__init__.py
@@ -4,7 +4,7 @@
An object-oriented GUI library for pyglet.
"""
-__version__ = '0.3.0'
+__version__ = '0.3.1'
from .widget import *
from .root import *
|
<I> Automatically generated by python-semantic-release
|
py
|
diff --git a/grimoire/elk/elastic.py b/grimoire/elk/elastic.py
index <HASH>..<HASH> 100644
--- a/grimoire/elk/elastic.py
+++ b/grimoire/elk/elastic.py
@@ -121,7 +121,7 @@ class ElasticSearch(object):
return new_items
- def bulk_upload_sync(self, items, field_id, sync=False):
+ def bulk_upload_sync(self, items, field_id, sync=True):
''' Upload in controlled packs items to ES using bulk API
and wait until the items appears in searches '''
|
[Elastic] Wait in sync bulk upload to avoid using data not already indexed.
|
py
|
diff --git a/fmn/lib/models.py b/fmn/lib/models.py
index <HASH>..<HASH> 100644
--- a/fmn/lib/models.py
+++ b/fmn/lib/models.py
@@ -288,12 +288,13 @@ class Rule(BASE):
raise ValueError("%r is not a valid code_path" % code_path)
@classmethod
- def create_from_code_path(cls, session, valid_paths, code_path, **kw):
+ def create_from_code_path(cls, session, valid_paths, code_path,
+ negated=False, **kw):
# This will raise an exception if invalid
Rule.validate_code_path(valid_paths, code_path, **kw)
- filt = cls(code_path=code_path)
+ filt = cls(code_path=code_path, negated=negated)
filt.arguments = kw
session.add(filt)
|
Allow creating a rule already negated.
|
py
|
diff --git a/lifelines/fitters/__init__.py b/lifelines/fitters/__init__.py
index <HASH>..<HASH> 100644
--- a/lifelines/fitters/__init__.py
+++ b/lifelines/fitters/__init__.py
@@ -500,6 +500,8 @@ class ParametericUnivariateFitter(UnivariateFitter):
if results.success:
# pylint: disable=no-value-for-parameter
hessian_ = hessian(negative_log_likelihood)(results.x, Ts, E, entry, weights)
+ # see issue https://github.com/CamDavidsonPilon/lifelines/issues/801
+ hessian_ = (hessian_ + hessian_.T) / 2
return results.x, -results.fun * weights.sum(), hessian_ * weights.sum()
# convergence failed.
@@ -1508,6 +1510,8 @@ class ParametricRegressionFitter(BaseFitter):
sum_weights = weights.sum()
# pylint: disable=no-value-for-parameter
hessian_ = hessian(self._neg_likelihood_with_penalty_function)(results.x, Ts, E, weights, entries, Xs)
+ # See issue https://github.com/CamDavidsonPilon/lifelines/issues/801
+ hessian_ = (hessian_ + hessian_.T) / 2
return results.x, -sum_weights * results.fun, sum_weights * hessian_
raise utils.ConvergenceError(
|
make sure hessians are always symmetric
|
py
|
diff --git a/src/collectors/postgres/postgres.py b/src/collectors/postgres/postgres.py
index <HASH>..<HASH> 100644
--- a/src/collectors/postgres/postgres.py
+++ b/src/collectors/postgres/postgres.py
@@ -70,7 +70,7 @@ class PostgresqlCollector(diamond.collector.Collector):
for klass in metrics:
stat = klass(self.connections, underscore=self.config['underscore'])
stat.fetch()
- [self.publish(metric, value) for metric, value in stat]
+ [self.publish(metric, value) for metric, value in stat if value is not None]
# Cleanup
[conn.close() for conn in self.connections.itervalues()]
|
Ignore None stats from postgres
|
py
|
diff --git a/pyqode/core/modes/code_completion.py b/pyqode/core/modes/code_completion.py
index <HASH>..<HASH> 100644
--- a/pyqode/core/modes/code_completion.py
+++ b/pyqode/core/modes/code_completion.py
@@ -337,7 +337,9 @@ class CodeCompletionMode(Mode, QtCore.QObject):
self.__preload(code, self.editor.filePath, self.editor.fileEncoding)
def _onInstall(self, editor):
- get_server().signals.workCompleted.connect(self.__onWorkFinished)
+ srv = get_server()
+ if srv:
+ srv.signals.workCompleted.connect(self.__onWorkFinished)
self.__completer = QtGui.QCompleter([""], editor)
self.__completer.setCompletionMode(self.__completer.PopupCompletion)
self.__completer.activated.connect(self.__insertCompletion)
|
Fix issue when used from qt designer
|
py
|
diff --git a/third_party/tvcm/tvcm/browser_controller.py b/third_party/tvcm/tvcm/browser_controller.py
index <HASH>..<HASH> 100644
--- a/third_party/tvcm/tvcm/browser_controller.py
+++ b/third_party/tvcm/tvcm/browser_controller.py
@@ -121,6 +121,7 @@ class BrowserController(object):
except:
self._browser.Close()
+ raise
def NavigateToPath(self, path):
self._tab.Navigate(self._server.url + path)
|
Re-raise errors that happened during browser start after closing the browser
|
py
|
diff --git a/GPy/kern/_src/stationary.py b/GPy/kern/_src/stationary.py
index <HASH>..<HASH> 100644
--- a/GPy/kern/_src/stationary.py
+++ b/GPy/kern/_src/stationary.py
@@ -171,7 +171,8 @@ class Stationary(Kern):
#the lower memory way with a loop
ret = np.empty(X.shape, dtype=np.float64)
- [np.sum(tmp*(X[:,q][:,None]-X2[:,q][None,:]), axis=1, out=ret[:,q]) for q in xrange(self.input_dim)]
+ for q in xrange(self.input_dim):
+ np.sum(tmp*(X[:,q][:,None]-X2[:,q][None,:]), axis=1, out=ret[:,q])
ret /= self.lengthscale**2
return ret
|
for loop speedup in gradients X
|
py
|
diff --git a/py3status/py3.py b/py3status/py3.py
index <HASH>..<HASH> 100644
--- a/py3status/py3.py
+++ b/py3status/py3.py
@@ -1041,10 +1041,9 @@ class Py3:
"""
Return a string from a Composite.
"""
- if isinstance(format_string, Composite):
- return format_string.text()
- else:
+ if not isinstance(format_string, Composite):
return ''
+ return format_string.text()
def stop_sound(self):
"""
|
Reduce py3.py by 1 line
|
py
|
diff --git a/shapefile.py b/shapefile.py
index <HASH>..<HASH> 100644
--- a/shapefile.py
+++ b/shapefile.py
@@ -549,11 +549,13 @@ class Reader:
"""Returns all records in a dbf file."""
if not self.numRecords:
self.__dbfHeader()
+ records = []
f = self.__getFileObj(self.dbf)
f.seek(self.__dbfHeaderLength())
- flat = unpack(self.__recStruct.format * self.numRecords, f.read(self.__recStruct.size * self.numRecords))
- rowlen = len(self.fields) - 1
- records = list(izip(*(iter(flat),) * rowlen))
+ for i in range(self.numRecords):
+ r = self.__record()
+ if r:
+ records.append(r)
return records
def iterRecords(self):
|
Revert back to the original records() method. Fixes issue introduced in PR #<I>; see issue #<I>. Previously tried fixing it in PR #<I>, but reverting to the original was better.
|
py
|
diff --git a/test/test_babel.py b/test/test_babel.py
index <HASH>..<HASH> 100644
--- a/test/test_babel.py
+++ b/test/test_babel.py
@@ -1164,7 +1164,7 @@ struct S2
t.parse()
self.assertEqual(
t.api.namespaces['ns1'].doc,
- 'This is a docstring for ns1.\n\nThis is another docstring for ns1.\n')
+ 'This is a docstring for ns1.\nThis is another docstring for ns1.\n')
# Test that namespaces without types or routes are deleted.
text = """\
|
Fix test broken by be9ba<I>.
|
py
|
diff --git a/spiketoolkit/preprocessing/normalize_by_quantile.py b/spiketoolkit/preprocessing/normalize_by_quantile.py
index <HASH>..<HASH> 100644
--- a/spiketoolkit/preprocessing/normalize_by_quantile.py
+++ b/spiketoolkit/preprocessing/normalize_by_quantile.py
@@ -7,9 +7,9 @@ class NormalizeByQuantileRecording(RecordingExtractor):
preprocessor_name = 'NormalizeByQuantileRecording'
installed = True # check at class level if installed or not
_gui_params = [
- {'name': 'scalar', 'type': 'float',
+ {'name': 'scale', 'type': 'float',
'title': "Scale for the output distribution"},
- {'name': 'offset', 'type': 'float',
+ {'name': 'median', 'type': 'float',
'title': "Median for the output distribution"},
{'name': 'q1', 'type': 'float',
'title': "Lower quantile used for measuring the scale"},
|
Update normalize_by_quantile.py
|
py
|
diff --git a/great_expectations/data_context/data_context.py b/great_expectations/data_context/data_context.py
index <HASH>..<HASH> 100644
--- a/great_expectations/data_context/data_context.py
+++ b/great_expectations/data_context/data_context.py
@@ -941,6 +941,7 @@ class BaseDataContext(object):
Returns:
True for Success and False for Failure.
"""
+ key = ExpectationSuiteIdentifier(expectation_suite_name=expectation_suite_name)
if not self._stores[self.expectations_store_name].has_key(key):
raise ge_exceptions.DataContextError(
"expectation_suite with name {} does not exist."
|
ayirpmissingkey: adding the line that defines the missing key
|
py
|
diff --git a/galpy/df_src/streamgapdf.py b/galpy/df_src/streamgapdf.py
index <HASH>..<HASH> 100644
--- a/galpy/df_src/streamgapdf.py
+++ b/galpy/df_src/streamgapdf.py
@@ -143,7 +143,6 @@ class streamgapdf(galpy.df_src.streamdf.streamdf):
2015-06-22 - Written - Bovy (IAS)
"""
- print "NEED TO MAKE SURE THAT KICKS AREN'T APPLIED IF COMING FROM OTHER ARM"
if tdisrupt is None: tdisrupt= self._tdisrupt
dOmin= dangle/tdisrupt
# First determine delta angle_par at timpact
@@ -479,6 +478,9 @@ class streamgapdf(galpy.df_src.streamdf.streamdf):
self._gap_leading= True
else:
self._gap_leading= False
+ if (self._gap_leading and not self._leading) \
+ or (not self._gap_leading and self._leading):
+ raise ValueError('Modeling leading (trailing) impact for trailing (leading) arm; this is not allowed because it is nonsensical in this framework')
self._impact_angle= numpy.fabs(impact_angle)
self._gap_sigMeanSign= 1.
if self._gap_leading and self._progenitor_Omega_along_dOmega/self._sigMeanSign < 0.:
|
don't allow modeling a leading stream with a trailing impact and vice versa
|
py
|
diff --git a/django_q/tasks.py b/django_q/tasks.py
index <HASH>..<HASH> 100644
--- a/django_q/tasks.py
+++ b/django_q/tasks.py
@@ -456,7 +456,7 @@ class Chain(object):
Start queueing the chain to the worker cluster
:return: the chain's group id
"""
- self.group = async_chain(chain=self.chain.copy(), group=self.group, cached=self.cached, sync=self.sync,
+ self.group = async_chain(chain=self.chain[:], group=self.group, cached=self.cached, sync=self.sync,
broker=self.broker)
self.started = True
return self.group
|
Replace list.copy() with a slice for Python <I> compatibility.
|
py
|
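A quick aside on the change above, as a hedged sketch (variable names are illustrative, not from django_q): list.copy() only exists on Python 3, while a full slice copies a list on both Python 2 and 3.

# Portable shallow copy of a list across Python 2 and 3.
chain = ['task_a', 'task_b', 'task_c']

try:
    copy_py3 = chain.copy()      # AttributeError on Python 2
except AttributeError:
    copy_py3 = None

copy_portable = chain[:]         # slice copy works on both versions
assert copy_portable == chain and copy_portable is not chain
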
diff --git a/tests/test_storages.py b/tests/test_storages.py
index <HASH>..<HASH> 100644
--- a/tests/test_storages.py
+++ b/tests/test_storages.py
@@ -13,6 +13,7 @@ from datetime import datetime
from django.core.files.base import File
from django.core.files.storage import FileSystemStorage, Storage
+from django.conf import settings as django_settings
from django.test import TestCase
from queued_storage.backends import QueuedStorage
@@ -27,8 +28,8 @@ class StorageTests(TestCase):
self.old_celery_always_eager = getattr(
settings, 'CELERY_ALWAYS_EAGER', False)
settings.CELERY_ALWAYS_EAGER = True
- self.local_dir = tempfile.mkdtemp()
- self.remote_dir = tempfile.mkdtemp()
+ self.local_dir = tempfile.mkdtemp(dir=os.path.join(django_settings.MEDIA_ROOT, 'storage_tests_local'))
+ self.remote_dir = tempfile.mkdtemp(dir=os.path.join(django_settings.MEDIA_ROOT, 'storage_tests_local'))
tmp_dir = tempfile.mkdtemp()
self.test_file_name = 'queued_storage.txt'
self.test_file_path = path.join(tmp_dir, self.test_file_name)
|
Apply suggestion to fix StorageTests. Since Django <I>, temp files outside MEDIA_ROOT are no longer allowed.
|
py
|
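A minimal sketch of the pattern the test change above relies on, assuming a configured Django project with MEDIA_ROOT set; the subdirectory name is illustrative. Newer Django versions reject storage paths outside MEDIA_ROOT, so the temporary directories are created beneath it.

import os
import tempfile

from django.conf import settings

# tempfile.mkdtemp(dir=...) requires the parent directory to exist (Python 3).
parent = os.path.join(settings.MEDIA_ROOT, 'storage_tests_local')
os.makedirs(parent, exist_ok=True)

# The temporary directory now lives under MEDIA_ROOT, so storage backends
# that validate paths against MEDIA_ROOT will accept files written here.
local_dir = tempfile.mkdtemp(dir=parent)
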
diff --git a/pbf_file_size_estimation/app_settings.py b/pbf_file_size_estimation/app_settings.py
index <HASH>..<HASH> 100644
--- a/pbf_file_size_estimation/app_settings.py
+++ b/pbf_file_size_estimation/app_settings.py
@@ -3,5 +3,5 @@ from django.conf import settings
PBF_FILE_SIZE_ESTIMATION_CSV_FILE_PATH = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'planet-stats.csv')
-if hasattr(settings, 'PBF_FILE_SIZE_ESTIMATION_CVS_FILE_PATH'):
- PBF_FILE_SIZE_ESTIMATION_CSV_FILE_PATH = settings.PBF_FILE_SIZE_ESTIMATION_CVS_FILE_PATH
+if hasattr(settings, 'PBF_FILE_SIZE_ESTIMATION_CSV_FILE_PATH'):
+ PBF_FILE_SIZE_ESTIMATION_CSV_FILE_PATH = settings.PBF_FILE_SIZE_ESTIMATION_CSV_FILE_PATH
|
Fix typo in project setting name. This is about character-separated values (CSV), not the Concurrent Versions System (CVS).
|
py
|
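The diff above uses a common Django configuration pattern: a module-level default that an optional project setting can override, guarded by hasattr(). A generic sketch with hypothetical names (MY_CSV_FILE_PATH is not a real setting):

import os

from django.conf import settings

# Package-level default, resolved relative to this module.
MY_CSV_FILE_PATH = os.path.join(
    os.path.abspath(os.path.dirname(__file__)), 'planet-stats.csv')

# Allow the project to override the default. The typo fixed above meant the
# override was silently ignored because the attribute name never matched.
if hasattr(settings, 'MY_CSV_FILE_PATH'):
    MY_CSV_FILE_PATH = settings.MY_CSV_FILE_PATH
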
diff --git a/scs_osio/user_topics.py b/scs_osio/user_topics.py
index <HASH>..<HASH> 100755
--- a/scs_osio/user_topics.py
+++ b/scs_osio/user_topics.py
@@ -65,7 +65,7 @@ if __name__ == '__main__':
# ----------------------------------------------------------------------------------------------------------------
# run...
- # find self...
+ # find...
topics = manager.find_for_user(cmd.user_id)
for topic in topics:
|
Added user_topics top-level script.
|
py
|
diff --git a/viztricks/convenience.py b/viztricks/convenience.py
index <HASH>..<HASH> 100644
--- a/viztricks/convenience.py
+++ b/viztricks/convenience.py
@@ -74,12 +74,15 @@ def imagesc(data, ax=None):
return plt.show
-def axes_grid(n):
- '''Finds a reasonable arrangement of n axes. Returns (fig, axes) tuple.'''
+def axes_grid(n, sharex=False, sharey=False, subplot_kw=None, gridspec_kw=None,
+ fig_kw=None):
+ '''Finds a reasonable arrangement of n axes. Returns (fig, axes) tuple.
+ For keyword arguments descriptions, see matplotlib.pyplot.subplots'''
r = np.floor(np.sqrt(n))
r, c = int(r), int(np.ceil(n / r))
- fig, axes = plt.subplots(nrows=r, ncols=c, figsize=(c*4, r*4))
- axes = np.atleast_2d(axes)
+ fig, axes = plt.subplots(nrows=r, ncols=c, figsize=(c*4, r*4), squeeze=False,
+ subplot_kw=subplot_kw, gridspec_kw=gridspec_kw,
+ **fig_kw)
# Turn off any extra axes
for ax in axes.flat[n:]:
ax.set_axis_off()
|
Making axes_grid more general
|
py
|
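One detail in the change above worth spelling out: passing squeeze=False to matplotlib's plt.subplots guarantees a 2-D array of Axes regardless of the grid shape, which is why the separate np.atleast_2d call could be dropped. A small sketch:

import matplotlib.pyplot as plt

# squeeze=False means axes is always 2-D, even for a 1x1 or 1xN grid, so
# callers can index axes[i, j] or iterate axes.flat without special cases.
fig, axes = plt.subplots(nrows=1, ncols=3, squeeze=False)
assert axes.shape == (1, 3)

# Hide unused cells, mirroring what axes_grid does for leftover axes.
for ax in axes.flat[2:]:
    ax.set_axis_off()
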
diff --git a/pyinfra/api/state.py b/pyinfra/api/state.py
index <HASH>..<HASH> 100644
--- a/pyinfra/api/state.py
+++ b/pyinfra/api/state.py
@@ -384,10 +384,6 @@ class State(object):
'''
if not hash_key:
- logger.warning((
- 'Use of `State.get_temp_filename` without a key is deprecated, '
- 'as it may generated imbalanced operations.'
- ))
hash_key = six.text_type(uuid4())
temp_filename = '{0}/{1}'.format(
|
Remove unnecessary warning about using `get_temp_filename` without a filename as the new state order fixes this.
|
py
|
diff --git a/geomdl/tessellate.py b/geomdl/tessellate.py
index <HASH>..<HASH> 100644
--- a/geomdl/tessellate.py
+++ b/geomdl/tessellate.py
@@ -32,10 +32,10 @@ class AbstractTessellate(object):
self._arguments = dict()
def __getstate__(self):
- return self._vertices, self._faces
+ return self._vertices, self._faces, self._tsl_func, self._arguments
def __setstate__(self, state):
- self._vertices, self._faces = state
+ self._vertices, self._faces, self._tsl_func, self._arguments = state
@property
def vertices(self):
@@ -142,6 +142,16 @@ class TrimTessellate(AbstractTessellate):
self._tsl_func = tsl.make_triangle_mesh
self._tsl_trim_func = tsl.surface_trim_tessellate
+ def __getstate__(self):
+ ret_data = super(TrimTessellate, self).__getstate__()
+ ret_data = list(ret_data) + [self._tsl_trim_func]
+ return tuple(ret_data)
+
+ def __setstate__(self, state):
+ inp_data = list(state)
+ self._tsl_trim_func = inp_data.pop()
+ super(TrimTessellate, self).__setstate__(inp_data)
+
def tessellate(self, points, **kwargs):
""" Applies triangular tessellation w/ trimming curves.
|
Improve pickle support for multiprocessing
|
py
|
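The pattern in the diff above -- each class appending its own attributes to the state tuple and popping them back off -- is a standard way to keep __getstate__/__setstate__ composable across a class hierarchy. A self-contained sketch with illustrative names (not the actual geomdl classes):

import pickle

class Base(object):
    def __init__(self):
        self.vertices, self.faces = [], []

    def __getstate__(self):
        return (self.vertices, self.faces)

    def __setstate__(self, state):
        self.vertices, self.faces = state

class Trimmed(Base):
    def __init__(self):
        super(Trimmed, self).__init__()
        self.trim_func = 'surface_trim_tessellate'  # stand-in for a callable reference

    def __getstate__(self):
        # Append subclass state to whatever the base class produced.
        return tuple(list(super(Trimmed, self).__getstate__()) + [self.trim_func])

    def __setstate__(self, state):
        data = list(state)
        self.trim_func = data.pop()              # take our item off the end...
        super(Trimmed, self).__setstate__(data)  # ...and hand the rest to the base

restored = pickle.loads(pickle.dumps(Trimmed()))
assert restored.trim_func == 'surface_trim_tessellate'
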
diff --git a/test/source_generation_test.py b/test/source_generation_test.py
index <HASH>..<HASH> 100644
--- a/test/source_generation_test.py
+++ b/test/source_generation_test.py
@@ -96,12 +96,15 @@ class TestFooRecord(AutoTest, TestCase):
@no_auto_store()
class EnumRecord(Record):
- e = Enum(["HELLO", "GOODBYE"])
+ e = Enum(["HELLO", "GOODBYE"], name="MyEnum")
class TestEnumRecord(AutoTest, TestCase):
schema_classes = [EnumRecord]
+ def test_name_is_preserved(self):
+ self.assertIn("name='MyEnum'", to_python_source(self.schema_classes))
+
class DependentRecords(AutoTest, TestCase):
schema_classes = [source_generation_helpers.A, source_generation_helpers.B]
|
Test some aspects of enums in source generation
|
py
|
diff --git a/webkit_server.py b/webkit_server.py
index <HASH>..<HASH> 100644
--- a/webkit_server.py
+++ b/webkit_server.py
@@ -432,7 +432,7 @@ class Server(object):
def kill(self):
""" Kill the process. """
self._server.kill()
- self._server.wait()
+ self._server.communicate()
def connect(self):
""" Returns a new socket connection to this server. """
|
Use communicate() to ensure standard file descriptors are closed. This internally calls wait() but also ensures that any file descriptors for stdin, stdout, and stderr are closed. If we don't do this we'll leak file descriptors. For long-running processes that start & stop a WebKit server many times, this can result in an eventual crash due to hitting the max open files limit on the underlying system.
|
py
|
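To make the reasoning in the commit message above concrete, here is a hedged sketch (POSIX assumed for the echo command): wait() only reaps the exit status, while communicate() also reads the child's stdout/stderr to EOF and closes those pipes, so repeatedly spawning children does not accumulate open file descriptors.

import subprocess

proc = subprocess.Popen(['echo', 'hello'],
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE)

# communicate() drains both streams, closes the pipe objects, and then waits
# for the process to exit; plain wait() would leave the pipes open until the
# Popen object is garbage collected.
out, err = proc.communicate()
assert proc.returncode == 0
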
diff --git a/pgmpy/tests/test_Factor.py b/pgmpy/tests/test_Factor.py
index <HASH>..<HASH> 100644
--- a/pgmpy/tests/test_Factor.py
+++ b/pgmpy/tests/test_Factor.py
@@ -37,9 +37,9 @@ class TestFactorMethods(unittest.TestCase):
self.assertListEqual(self.phi.assignment([4, 5, 6]), [['x1_1', 'x2_0', 'x3_0'],
['x1_1', 'x2_0', 'x3_1'],
['x1_1', 'x2_1', 'x3_0']])
- self.assertListEqual(self.phi.assignment(np.array([4, 5, 6])), [['x1_1', 'x2_0', 'x3_0'],
- ['x1_1', 'x2_0', 'x3_1'],
- ['x1_1', 'x2_1', 'x3_0']])
+ self.assertListEqual(self.phi1.assignment(np.array([4, 5, 6])), [['x1_0', 'x2_2', 'x3_0'],
+ ['x1_0', 'x2_2', 'x3_1'],
+ ['x1_1', 'x2_0', 'x3_0']])
def test_assignment_indexerror(self):
self.assertRaises(IndexError, self.phi.assignment, [10])
|
Correct tests for the assignment method according to the new representation of Factor [refs #<I>].
|
py
|
diff --git a/insteonplm/plm.py b/insteonplm/plm.py
index <HASH>..<HASH> 100644
--- a/insteonplm/plm.py
+++ b/insteonplm/plm.py
@@ -206,9 +206,9 @@ class PLM(asyncio.Protocol):
int.from_bytes(product_key, byteorder='big'))
if device is not None:
if isinstance(device, list):
- for dev in device:
- self.devices[device.id] = device
- self.log.info('Device with id %s added to device list.', device.id)
+ for currdev in device:
+ self.devices[currdev.id] = device
+ self.log.info('Device with id %s added to device list.', currdev.id)
else:
self.devices[device.id] = device
self.log.info('Device with id %s added to device list.', device.id)
|
Fix handling of the creation of multiple devices from one cat/subcat pair.
|
py
|
diff --git a/tilequeue/query/postgres.py b/tilequeue/query/postgres.py
index <HASH>..<HASH> 100644
--- a/tilequeue/query/postgres.py
+++ b/tilequeue/query/postgres.py
@@ -116,7 +116,8 @@ def jinja_filter_bbox(bounds, srid=3857):
def jinja_filter_bbox_overlaps(bounds, geometry_col_name, srid=3857):
"""
- Check whether the boundary of the geometry intersects with the bounding box.
+ Check whether the boundary of the geometry intersects with the bounding
+ box.
Note that the usual meaning of "overlaps" in GIS terminology is that the
boundaries of the box and polygon intersect, but not the interiors. This
@@ -124,9 +125,9 @@ def jinja_filter_bbox_overlaps(bounds, geometry_col_name, srid=3857):
st_overlaps will be false.
However, that's not what we want. This is used for boundary testing, and
- while we don't want to pull out a whole country boundary if the bounding box
- is fully within it, we _do_ want to if the country boundary is within the
- bounding box.
+ while we don't want to pull out a whole country boundary if the bounding
+ box is fully within it, we _do_ want to if the country boundary is within
+ the bounding box.
Therefore, this test has an extra "or st_contains" test to also pull in any
boundaries which are completely within the bounding box.
|
Fix flake8 warnings - overlong lines.
|
py
|
diff --git a/openquake/baselib/parallel.py b/openquake/baselib/parallel.py
index <HASH>..<HASH> 100644
--- a/openquake/baselib/parallel.py
+++ b/openquake/baselib/parallel.py
@@ -605,12 +605,12 @@ class Starmap(object):
@property
def num_tasks(self):
"""
- The number of tasks, if known, or the empty string.
+ The number of tasks, if known, or -1 otherwise.
"""
try:
return len(self.task_args)
except TypeError: # generators have no len
- return ''
+ return -1
def submit_all(self):
"""
@@ -657,8 +657,8 @@ class Starmap(object):
args = pickle_sequence(args)
self.sent += {a: len(p) for a, p in zip(self.argnames, args)}
if task_no == 1: # first time
- self.progress(
- 'Submitting %s "%s" tasks', self.num_tasks, self.name)
+ n = '' if self.num_tasks == -1 else self.num_tasks
+ self.progress('Submitting %s "%s" tasks', n, self.name)
yield args
|
Small improvement [skip hazardlib] [demos]
|
py
|
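The -1 sentinel in the diff above exists because generators do not support len(); a tiny sketch of the same try/except:

def task_args_gen():
    for i in range(3):
        yield (i,)

def num_tasks(task_args):
    """Return the number of tasks, or -1 when task_args is a generator."""
    try:
        return len(task_args)
    except TypeError:  # generators have no len()
        return -1

assert num_tasks([(1,), (2,)]) == 2
assert num_tasks(task_args_gen()) == -1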