diff stringlengths 139-3.65k | message stringlengths 8-627 | diff_languages stringclasses 1 value |
---|---|---|
diff --git a/src/Exscriptd/Service.py b/src/Exscriptd/Service.py
index <HASH>..<HASH> 100644
--- a/src/Exscriptd/Service.py
+++ b/src/Exscriptd/Service.py
@@ -55,6 +55,7 @@ class Service(object):
for logger in self.loggers[order.id]:
del logger.manager.loggerDict[logger.name]
logger.manager = None
+ del self.loggers[order.id]
def _update_host_logname(self, order, host):
host.set_logname(self.get_logname(order, host.get_logname()))
|
exscriptd: fix: forgot to remove loggers from internal list in the previous commit.
|
py
|
diff --git a/sirmordred/task_enrich.py b/sirmordred/task_enrich.py
index <HASH>..<HASH> 100644
--- a/sirmordred/task_enrich.py
+++ b/sirmordred/task_enrich.py
@@ -27,7 +27,7 @@ import time
from datetime import datetime, timedelta
-from elasticsearch import Elasticsearch
+from elasticsearch import Elasticsearch, RequestsHttpConnection
from grimoire_elk.elk import (do_studies,
enrich_backend,
@@ -282,8 +282,9 @@ class TaskEnrich(Task):
logger.debug("Autorefresh for Areas of Code study index: %s", aoc_index)
- es = Elasticsearch([self.conf['es_enrichment']['url']], timeout=100,
- verify_certs=self._get_enrich_backend().elastic.requests.verify)
+ es = Elasticsearch([self.conf['es_enrichment']['url']], timeout=100, retry_on_timeout=True,
+ verify_certs=self._get_enrich_backend().elastic.requests.verify,
+ connection_class=RequestsHttpConnection)
if not es.indices.exists(index=aoc_index):
logger.debug("Not doing autorefresh, index doesn't exist for Areas of Code study")
|
[task_enrich] Fix elasticsearch.exceptions.SSLError: ConnectionError. This PR updates the statement that initializes the Elasticsearch objects (imported from the elasticsearch library) to use RequestsHttpConnection as the connection class, thus avoiding `elasticsearch.exceptions.SSLError: ConnectionError` exceptions.
|
py
|
diff --git a/abilian/app.py b/abilian/app.py
index <HASH>..<HASH> 100644
--- a/abilian/app.py
+++ b/abilian/app.py
@@ -619,6 +619,11 @@ class Application(Flask, ServiceManager, PluginManager):
manifest_file = assets_base_dir / 'manifest.json'
assets.manifest = 'json:{}'.format(str(manifest_file))
+ # set up load_path for application static dir. This is required since we are
+ # setting Environment.load_path for other assets (like core_bundle below),
+ # in this case Flask-Assets uses webasssets resolvers instead of Flask's one
+ assets.append_path(self.static_folder, self.static_url_path)
+
# filters options
less_args = ['-ru']
assets.config['less_extra_args'] = less_args
@@ -679,7 +684,7 @@ class Application(Flask, ServiceManager, PluginManager):
def _register_base_assets(self):
"""
Registers assets needed by Abilian. This is done in a separate method in
- order to allow applications to redefins it at will.
+ order to allow applications to redefine it at will.
"""
from abilian.web import assets as bundles
|
assets: setup load path for static_folder
|
py
|
diff --git a/barcode/__init__.py b/barcode/__init__.py
index <HASH>..<HASH> 100755
--- a/barcode/__init__.py
+++ b/barcode/__init__.py
@@ -16,8 +16,8 @@ __copyright__ = '2010-2013, Thorsten Weimann; 2014, Alexander Shorin'
__author_email__ = '[email protected]'
__description__ = ('Create standard barcodes with Python. No external '
'modules needed (optional PIL support included).')
-__version__ = '0.8'
-__release__ = '{version}beta1'.format(version=__version__)
+__version__ = '0.8.0'
+__release__ = '{version}'.format(version=__version__)
__license__ = 'MIT'
__url__ = 'https://github.com/kxepal/viivakoodi'
__classifiers__ = [
|
Release <I> version and switch to semantic versioning.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -38,8 +38,13 @@ EXTRA_REQUIREMENTS = {
'deploy': ['twine', 'setuptools', 'wheel'],
}
-with open('README.md', 'r') as f:
- LONG_DESCRIPTION = f.read()
+# building the docs on readthedocs fails with a FileNotFoundError
+# https://github.com/IAMconsortium/pyam/issues/363
+try:
+ with open('README.md', 'r') as f:
+ LONG_DESCRIPTION = f.read()
+except FileNotFoundError:
+ LONG_DESCRIPTION = ''
# thank you https://stormpath.com/blog/building-simple-cli-interfaces-in-python
class RunTests(Command):
|
add try-except for importing readme when building the docs (#<I>)
|
py
|
diff --git a/spacy/tests/parser/test_ner.py b/spacy/tests/parser/test_ner.py
index <HASH>..<HASH> 100644
--- a/spacy/tests/parser/test_ner.py
+++ b/spacy/tests/parser/test_ner.py
@@ -46,7 +46,7 @@ def test_unit_end_gazetteer(EN):
ents = matcher(doc)
assert len(ents) == 1
EN.entity(doc)
- assert list(nlp.ents)[0].text == 'cal'
+ assert list(EN.ents)[0].text == 'cal'
|
* Add test for matcher end-point problem
|
py
|
diff --git a/test/amqp-integration-test.py b/test/amqp-integration-test.py
index <HASH>..<HASH> 100644
--- a/test/amqp-integration-test.py
+++ b/test/amqp-integration-test.py
@@ -96,7 +96,7 @@ def run_client_tests():
--key-dir %s \
--cert-dir %s \
--text \
- --agree-tos \
+ --agree-eula \
--email "" \
--renewer-config-file %s \
''' % (letsencrypt_bin, tempconfig, tempwork, tempkey, tempwork, renewer_config_filename)
|
Update execution of the integration test to match new parameters.
|
py
|
diff --git a/scriptworker/cot/verify.py b/scriptworker/cot/verify.py
index <HASH>..<HASH> 100644
--- a/scriptworker/cot/verify.py
+++ b/scriptworker/cot/verify.py
@@ -687,7 +687,7 @@ def verify_firefox_decision_command(decision_link):
))
bash_commands = command[-1].split('&&')
allowed_commands = ('cd', 'ln')
- allowed_mach_args = ('./mach', 'taskgraph', 'decision')
+ allowed_mach_args = ['./mach', 'taskgraph', 'decision']
for bash_command in bash_commands:
parts = shlex.split(bash_command)
if parts[0] in allowed_commands:
@@ -695,7 +695,7 @@ def verify_firefox_decision_command(decision_link):
for part in parts:
if part.startswith('--'):
continue
- if part not in allowed_mach_args:
+ if not allowed_mach_args or part != allowed_mach_args.pop(0):
errors.append("{} {} Illegal command `{}`".format(
decision_link.name, decision_link.task_id, bash_command
))
|
make sure mach args are in order
|
py
|
diff --git a/benchexec/tablegenerator/__init__.py b/benchexec/tablegenerator/__init__.py
index <HASH>..<HASH> 100644
--- a/benchexec/tablegenerator/__init__.py
+++ b/benchexec/tablegenerator/__init__.py
@@ -1072,7 +1072,7 @@ def get_stats(rows, local_summary):
def indent(n):
return ' '*(n*4)
- return [tempita.bunch(id=None, title='total tasks', description=task_counts, content=rowsForStats[0]),
+ return [tempita.bunch(id=None, title='total', description=task_counts, content=rowsForStats[0]),
] + ([summary_row] if local_summary else []) + [
tempita.bunch(id=None, title=indent(1)+'correct results', description='(property holds + result is true) OR (property does not hold + result is false)', content=rowsForStats[1]),
tempita.bunch(id=None, title=indent(2)+'correct true', description='property holds + result is true', content=rowsForStats[2]),
|
Fix misleading row name: this row contains the total of the respective column over all tasks (not the total number of tasks, but also total CPU time etc.)
|
py
|
diff --git a/crispy/modules/quanty/generate.py b/crispy/modules/quanty/generate.py
index <HASH>..<HASH> 100644
--- a/crispy/modules/quanty/generate.py
+++ b/crispy/modules/quanty/generate.py
@@ -6,7 +6,7 @@
# This work is licensed under the terms of the MIT license. #
# For further information, see https://github.com/mretegan/crispy #
###################################################################
-"""This module provides a comand line interface for generating the parameters of atomic configurations."""
+"""This module provides a command line interface for generating the parameters of atomic configurations."""
__authors__ = ["Marius Retegan"]
__license__ = "MIT"
@@ -22,7 +22,7 @@ from crispy.modules.quanty.utils import Calculations, Cowan, Element
def generate_parameters(elements):
- """Generate the atomic parameters of the elments and store them in an HDF5 container."""
+ """Generate the atomic parameters of the elements and store them in an HDF5 container."""
config = h5py.get_config()
config.track_order = True
|
Spell check should be on by default
|
py
|
diff --git a/src/cmdline/settings.py b/src/cmdline/settings.py
index <HASH>..<HASH> 100644
--- a/src/cmdline/settings.py
+++ b/src/cmdline/settings.py
@@ -170,11 +170,12 @@ class SettingsParser(BaseCommand):
parser.add_argument(arg, **_info)
- if subcommands:
+ command_info = args.get('_COMMANDS', {})
+
+ if subcommands or command_info:
self.subparsers = parser.add_subparsers(help='sub-commands')
# go through all the args found for subcommands and create a subparser for them
- command_info = args.get('_COMMANDS', {})
for subcommand, args in subcommands.items():
kwargs = command_info.get(subcommand, {})
subcommand_parser = self.subparsers.add_parser(subcommand, **kwargs)
@@ -182,3 +183,10 @@ class SettingsParser(BaseCommand):
self.parse(args=args, parser=subcommand_parser)
subcommand_parser.set_defaults(subcommand=subcommand)
+
+ # check for any commands listed in command_info without additional settings
+ for subcommand in set(command_info.keys()) - set(subcommands.keys()):
+ kwargs = command_info[subcommand]
+ subcommand_parser = self.subparsers.add_parser(subcommand, **kwargs)
+
+ subcommand_parser.set_defaults(subcommand=subcommand)
|
Add subcommands that do not have any additional settings
|
py
|
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -45,7 +45,7 @@ F32 = numpy.float32
F64 = numpy.float64
TWO32 = 2 ** 32
RUPTURES_PER_BLOCK = 1000 # decided by MS
-BLOCKSIZE = 10000 # decided by MS
+BLOCKSIZE = 2000 # decided by MS
def build_ruptures(srcs, srcfilter, param, monitor):
|
Reduced event_based.BLOCKSIZE
|
py
|
diff --git a/paramiko/packet.py b/paramiko/packet.py
index <HASH>..<HASH> 100644
--- a/paramiko/packet.py
+++ b/paramiko/packet.py
@@ -261,14 +261,14 @@ class Packetizer (object):
else:
cmd_name = '$%x' % cmd
orig_len = len(data)
- if self.__compress_engine_out is not None:
- data = self.__compress_engine_out(data)
- packet = self._build_packet(data)
- if self.__dump_packets:
- self._log(DEBUG, 'Write packet <%s>, length %d' % (cmd_name, orig_len))
- self._log(DEBUG, util.format_binary(packet, 'OUT: '))
self.__write_lock.acquire()
try:
+ if self.__compress_engine_out is not None:
+ data = self.__compress_engine_out(data)
+ packet = self._build_packet(data)
+ if self.__dump_packets:
+ self._log(DEBUG, 'Write packet <%s>, length %d' % (cmd_name, orig_len))
+ self._log(DEBUG, util.format_binary(packet, 'OUT: '))
if self.__block_engine_out != None:
out = self.__block_engine_out.encrypt(packet)
else:
|
[project @ <EMAIL><I>-d<I>b<I>bac4f<I>c] I think the compression needs to be inside this lock in order to preserve order.
|
py
|
diff --git a/zappa/core.py b/zappa/core.py
index <HASH>..<HASH> 100644
--- a/zappa/core.py
+++ b/zappa/core.py
@@ -669,7 +669,7 @@ class Zappa(object):
if not os.path.exists(wheel_path):
# The file is not cached, download it.
- wheel_url = self.get_manylinux_wheel(package_name, package_version)
+ wheel_url = self.get_manylinux_wheel_url(package_name, package_version)
if not wheel_url:
return None
@@ -681,7 +681,7 @@ class Zappa(object):
return wheel_path
- def get_manylinux_wheel(self, package_name, package_version):
+ def get_manylinux_wheel_url(self, package_name, package_version):
"""
For a given package name, returns a link to the download URL,
else returns None.
|
refactored function to match base
|
py
|
diff --git a/metanl/freeling.py b/metanl/freeling.py
index <HASH>..<HASH> 100644
--- a/metanl/freeling.py
+++ b/metanl/freeling.py
@@ -86,8 +86,8 @@ class FreelingWrapper(ProcessWrapper):
results = []
for chunk_text in chunks:
if chunk_text.strip():
- text = (chunk_text + '\n').encode('utf-8')
- self.send_input(text)
+ textbytes = (chunk_text + '\n').encode('utf-8')
+ self.send_input(textbytes)
out_line = ''
while True:
out_line = self.receive_output_line()
|
Don't overwrite the variable 'text' with a bytestring.
|
py
|
diff --git a/packages/vaex-core/setup.py b/packages/vaex-core/setup.py
index <HASH>..<HASH> 100644
--- a/packages/vaex-core/setup.py
+++ b/packages/vaex-core/setup.py
@@ -24,7 +24,7 @@ install_requires_core = ["numpy>=1.16", "aplus", "tabulate>=0.8.3",
"future>=0.15.2", "pyyaml", "progressbar2",
"requests", "six", "cloudpickle", "pandas", "dask",
"nest-asyncio>=1.3.3", "pyarrow>=3.0", "frozendict",
- "blake3", "filelock", "pydantic>=1.8.0",
+ "blake3", "filelock", "pydantic>=1.8.0", "rich",
]
if sys.version_info[0] == 2:
install_requires_core.append("futures>=2.2.0")
|
TEMP: make rich a dependency
|
py
|
diff --git a/typedload/__init__.py b/typedload/__init__.py
index <HASH>..<HASH> 100644
--- a/typedload/__init__.py
+++ b/typedload/__init__.py
@@ -153,8 +153,6 @@ __all__ = [
'load',
'datadumper',
'dump',
- 'attrload',
- 'attrdump',
'typechecks',
]
|
Fix import *. I had forgotten the removed modules in __all__, so import * failed.
|
py
|
diff --git a/amqpconsumer/events.py b/amqpconsumer/events.py
index <HASH>..<HASH> 100644
--- a/amqpconsumer/events.py
+++ b/amqpconsumer/events.py
@@ -233,7 +233,8 @@ class EventConsumer(object):
:param Exception | None closing reason: The Exception containing the reason the connection was closed
"""
logger.warning('Channel {} was closed: {}'.format(channel, closing_reason))
- self._connection.close()
+ if not self._connection.is_closing and not self._connection.is_closed:
+ self._connection.close()
def start_consuming(self):
"""Sets up the consumer.
|
Check if connection is already closed before closing. Otherwise an error will be raised: "invalid close, state already closed".
|
py
|
diff --git a/doc/conf.py b/doc/conf.py
index <HASH>..<HASH> 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -118,7 +118,7 @@ pygments_style = 'tango_console_highlighting.TangoStyle'
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
-html_theme = 'default'
+html_theme = 'sphinxdoc'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
|
Update sphinx theme to new sphinx version
|
py
|
diff --git a/egoio/db_tables/model_draft.py b/egoio/db_tables/model_draft.py
index <HASH>..<HASH> 100644
--- a/egoio/db_tables/model_draft.py
+++ b/egoio/db_tables/model_draft.py
@@ -3704,6 +3704,16 @@ t_ego_res_powerplant_costdat_gid = Table(
schema='model_draft'
)
+class EgoRenewableFeedin(Base):
+ __tablename__ = 'ego_renewable_feedin'
+ __table_args__ = {'schema': 'model_draft'}
+
+ weather_scenario_id = Column(Integer, primary_key=True, nullable=False)
+ w_id = Column(Integer, primary_key=True, nullable=False)
+ source = Column(Text, primary_key=True, nullable=False)
+ weather_year = Column(Integer, primary_key=True, nullable=False)
+ feedin = Column(ARRAY(DOUBLE_PRECISION(precision=53)))
+ scenario = Column(Text, primary_key=True, nullable=False)
class EgoScenario(Base):
__tablename__ = 'ego_scenario'
|
Added the class definition for ego_renewable_feedin
|
py
|
diff --git a/indra/sources/sofia/processor.py b/indra/sources/sofia/processor.py
index <HASH>..<HASH> 100644
--- a/indra/sources/sofia/processor.py
+++ b/indra/sources/sofia/processor.py
@@ -235,7 +235,7 @@ class SofiaProcessor(object):
if proc and proc[0] is not None:
name = event_entry['Relation'] # Match name to theme (process)
theme = proc
- if prop and prop is not None:
+ if prop and prop[0] is not None:
theme_prop = prop
# We don't have a grounding, return nothing
else:
@@ -247,11 +247,13 @@ class SofiaProcessor(object):
if prop and prop[0] is not None:
theme_proc_prop = prop
# If we have theme and property, but no process
- elif prop[0] is not None:
+ elif prop and prop[0] is not None:
theme_prop = prop
# Return 4-tuple of:
- # Theme, Theme Property, Theme Process, Theme Process Property
+ # Theme, Theme Property, Theme Process, Theme Process Property
+ assert not all(co == (None, 0.0) for co in
+ [theme, theme_prop, theme_proc, theme_proc_prop])
return name, (theme, theme_prop, theme_proc, theme_proc_prop)
def _get_theme_prop(self, entity_inds):
|
Fix case where empty property comes out, add check
|
py
|
diff --git a/gnosis/eth/clients/blockscout_client.py b/gnosis/eth/clients/blockscout_client.py
index <HASH>..<HASH> 100644
--- a/gnosis/eth/clients/blockscout_client.py
+++ b/gnosis/eth/clients/blockscout_client.py
@@ -42,6 +42,8 @@ class BlockscoutClient:
EthereumNetwork.GODWOKEN: "https://v1.gwscan.com/",
EthereumNetwork.VENIDIUM_TESTNET: "https://evm-testnet.venidiumexplorer.com/",
EthereumNetwork.VENIDIUM: "https://evm.venidiumexplorer.com/",
+ EthereumNetwork.KLAY_BAOBAB: "https://baobab.scope.klaytn.com/",
+ EthereumNetwork.KLAY_CYPRESS: "https://scope.klaytn.com/",
}
def __init__(self, network: EthereumNetwork):
|
add klaytn mainnet cypress and testnet baobab (#<I>)
|
py
|
diff --git a/src/ai/backend/client/cli/admin/sessions.py b/src/ai/backend/client/cli/admin/sessions.py
index <HASH>..<HASH> 100644
--- a/src/ai/backend/client/cli/admin/sessions.py
+++ b/src/ai/backend/client/cli/admin/sessions.py
@@ -64,6 +64,8 @@ def sessions(status, access_key, id_only):
for item in resp['compute_sessions']:
if 'mem_cur_bytes' in item:
item['mem_cur_bytes'] = round(item['mem_cur_bytes'] / 2 ** 20, 1)
+ if 'mem_max_bytes' in item:
+ item['mem_max_bytes'] = round(item['mem_max_bytes'] / 2 ** 20, 1)
if id_only:
for item in resp['compute_sessions']:
|
Bugfix - correct unit for memory max bytes
|
py
|
diff --git a/pyte/screens.py b/pyte/screens.py
index <HASH>..<HASH> 100644
--- a/pyte/screens.py
+++ b/pyte/screens.py
@@ -1110,9 +1110,9 @@ History = namedtuple("History", "top bottom ratio size position")
class HistoryScreen(DiffScreen):
- """A screen subclass, which keeps track of screen history and allows
- pagination. This is not linux-specific, but still useful; see
- page 462 of VT520 User's Manual.
+ """A :class:~`pyte.screens.DiffScreen` subclass, which keeps track
+ of screen history and allows pagination. This is not linux-specific,
+ but still useful; see page 462 of VT520 User's Manual.
:param int history: total number of history lines to keep; is split
between top and bottom queues.
|
Updated the docstring of ``HistoryScreen``
|
py
|
diff --git a/vivarium/framework/engine.py b/vivarium/framework/engine.py
index <HASH>..<HASH> 100644
--- a/vivarium/framework/engine.py
+++ b/vivarium/framework/engine.py
@@ -6,6 +6,7 @@ from time import time
from collections import Iterable
from pprint import pformat
import gc
+import inspect
from bdb import BdbQuit
import pandas as pd
@@ -78,8 +79,9 @@ class SimulationContext:
if hasattr(component, 'configuration_defaults'):
# This reapplies configuration from some components but
# that shouldn't be a problem.
+ component_source = inspect.getfile(component.__class__)
config.read_dict(component.configuration_defaults, layer='component_configs',
- source=component.__file__)
+ source=component_source)
if hasattr(component, 'setup'):
sub_components = component.setup(builder)
done.add(component)
|
Figured out how to get the file name from an object
|
py
|
diff --git a/slither/visitors/slithir/expression_to_slithir.py b/slither/visitors/slithir/expression_to_slithir.py
index <HASH>..<HASH> 100644
--- a/slither/visitors/slithir/expression_to_slithir.py
+++ b/slither/visitors/slithir/expression_to_slithir.py
@@ -134,7 +134,7 @@ class ExpressionToSlithIR(ExpressionVisitor):
# Tuple with only one element. We need to convert the assignment to a Unpack
# Ex:
# (uint a,,) = g()
- elif isinstance(left, LocalVariableInitFromTuple) and left.tuple_index:
+ elif isinstance(left, LocalVariableInitFromTuple) and left.tuple_index and isinstance(right, TupleVariable):
operation = Unpack(left, right, left.tuple_index)
operation.set_expression(expression)
self._result.append(operation)
|
Fix incorrect handling of tuple variable re-assignment (bug introduced with #<I>)
|
py
|
diff --git a/quilt/cli/meta.py b/quilt/cli/meta.py
index <HASH>..<HASH> 100644
--- a/quilt/cli/meta.py
+++ b/quilt/cli/meta.py
@@ -69,7 +69,7 @@ class Command(object):
self.run(options, pargs)
def run(self, options, args):
- pass
+ raise NotImplementedError()
def add_args(self, parser):
pass
|
Raise exception if run method is not implemented. Raise an exception if the run method of a CLI command is not implemented. This avoids typos.
|
py
|
diff --git a/tests/volume_test.py b/tests/volume_test.py
index <HASH>..<HASH> 100644
--- a/tests/volume_test.py
+++ b/tests/volume_test.py
@@ -166,14 +166,12 @@ class FsTypeTest(unittest.TestCase):
]
for definition in definitions:
- with self.subTest(definition=definition):
- print(definition)
- volume = Volume(disk=Disk(ImageParser(), "..."))
- volume._get_blkid_type = mock.Mock(return_value=definition.get("blkid"))
- volume._get_magic_type = mock.Mock(return_value=definition.get("magic"))
- volume.info = definition
- volume.determine_fs_type()
- self.assertEqual(FILE_SYSTEM_TYPES[definition[_]], volume.fstype)
+ volume = Volume(disk=Disk(ImageParser(), "..."))
+ volume._get_blkid_type = mock.Mock(return_value=definition.get("blkid"))
+ volume._get_magic_type = mock.Mock(return_value=definition.get("magic"))
+ volume.info = definition
+ volume.determine_fs_type()
+ self.assertEqual(FILE_SYSTEM_TYPES[definition[_]], volume.fstype)
def test_no_clue_fstype(self):
volume = Volume(disk=Disk(ImageParser(), "..."))
|
Obviously, Python 2 does not support subtest
|
py
|
diff --git a/stanza/pipeline/processor.py b/stanza/pipeline/processor.py
index <HASH>..<HASH> 100644
--- a/stanza/pipeline/processor.py
+++ b/stanza/pipeline/processor.py
@@ -136,10 +136,10 @@ class ProcessorVariant(ABC):
"""
pass
- @abstractmethod
def bulk_process(self, docs):
- """ Process a list of Documents that are potentially preprocessed by the processor. """
- pass
+ """ Process a list of Documents. This should be replaced with a more efficient implementation if possible. """
+
+ return [self.process(doc) for doc in docs]
class UDProcessor(Processor):
""" Base class for the neural UD Processors (tokenize,mwt,pos,lemma,depparse,sentiment) """
|
By default, bulk process in a dumb manner so it at least does something useful
|
py
|
diff --git a/rdopkg/utils/specfile.py b/rdopkg/utils/specfile.py
index <HASH>..<HASH> 100644
--- a/rdopkg/utils/specfile.py
+++ b/rdopkg/utils/specfile.py
@@ -305,8 +305,13 @@ class Spec(object):
def set_patches_base(self, base):
v, _ = self.get_patches_base()
- if 'patches_ignore' in self.txt and (base is None or base == ''):
- base = self.get_tag('Version', expand_macros=True)
+ if re.search("^#\s*patches_ignore\s*=\s*\S+", self.txt, flags=re.M):
+ # This is a temporary hack as patches_ignore currently requires
+ # explicit patches_base. This should be solved with a proper
+ # magic comment parser and using Version in filtration logic
+ # when no patches_base is defined.
+ if not base:
+ base = self.get_tag('Version', expand_macros=True)
if base:
regex = r'^#\s*patches_base*'
if v is None and re.search(regex, self.txt, flags=re.M) is None:
|
Improve patches_ignore detection. patches_base was always added to .spec when it contained the string 'patches_ignore' (like rdopkg does in the changelog) due to an overly relaxed check. I used a regex to make sure it only triggers for a proper patches_ignore comment. No time was wasted on unit tests because all these hacks must be replaced with a proper magic comment parser without silly expectations and a million special cases. Fixes: <URL>
|
py
|
diff --git a/tensorflow_datasets/image/downsampled_imagenet.py b/tensorflow_datasets/image/downsampled_imagenet.py
index <HASH>..<HASH> 100644
--- a/tensorflow_datasets/image/downsampled_imagenet.py
+++ b/tensorflow_datasets/image/downsampled_imagenet.py
@@ -77,7 +77,7 @@ class DownsampledImagenet(tfds.core.GeneratorBasedBuilder):
description=(
"A dataset consisting of Train and Validation images of " +
config_name + " resolution."),
- version="0.1.0",
+ version="1.0.0",
supported_versions=[
tfds.core.Version("1.0.0", experiments={
tfds.core.Experiment.S3: True}),
|
Updates downsampled_imagenet to use the version which is in supported_versions. PiperOrigin-RevId: <I>
|
py
|
diff --git a/category_encoders/leave_one_out.py b/category_encoders/leave_one_out.py
index <HASH>..<HASH> 100644
--- a/category_encoders/leave_one_out.py
+++ b/category_encoders/leave_one_out.py
@@ -81,7 +81,7 @@ class LeaveOneOutEncoder(BaseEstimator, TransformerMixin):
self.drop_cols = []
self.verbose = verbose
self.static_cols = cols
- self.cols = None
+ self.cols = cols
self._dim = None
self.mapping = None
self.impute_missing = impute_missing
@@ -125,8 +125,6 @@ class LeaveOneOutEncoder(BaseEstimator, TransformerMixin):
# if columns aren't passed, just use every string column
if self.static_cols is None:
self.cols = get_obj_cols(X)
- else:
- self.cols = self.static_cols
categories = self.fit_leave_one_out(
X, y,
|
set cols to static cols by default and only overwrite cols if static_cols is None
|
py
|
diff --git a/iterm2_tools/tests/test_ipython.py b/iterm2_tools/tests/test_ipython.py
index <HASH>..<HASH> 100644
--- a/iterm2_tools/tests/test_ipython.py
+++ b/iterm2_tools/tests/test_ipython.py
@@ -32,8 +32,8 @@ f()
# First the control (without iterm2_tools)
if IPy5:
p = pexpect.spawn(' '.join(ipython + ['--quick', '--colors=NoColor',
- '--no-banner', '--no-simple-prompt']))
- p.write(commands)
+ '--no-banner', '--no-simple-prompt', '--no-term-title',
+ '--no-confirm-exit', '--no-color-info']))
p.sendeof()
p.expect(pexpect.EOF, timeout=10)
|
Remove some more variables from the tested IPython 5 pexpect output
|
py
|
diff --git a/openid/store/sqlstore.py b/openid/store/sqlstore.py
index <HASH>..<HASH> 100644
--- a/openid/store/sqlstore.py
+++ b/openid/store/sqlstore.py
@@ -1,6 +1,10 @@
"""
This module contains C{L{OpenIDStore}} implementations that use
various SQL databases to back them.
+
+Example of how to initialize a store database::
+
+ python -c 'from openid.store import sqlstore; import pysqlite2.dbapi2; sqlstore.SQLiteStore(pysqlite2.dbapi2.connect("cstore.db")).createTables()'
"""
import re
import time
|
[project @ store.sqlstore: add example to docstring of a one-liner for store creation] This should really probably be a script and/or incorporated into the install documentation.
|
py
|
diff --git a/apyori.py b/apyori.py
index <HASH>..<HASH> 100755
--- a/apyori.py
+++ b/apyori.py
@@ -12,7 +12,7 @@ from itertools import combinations
from itertools import chain
-__version__ = '0.1.0'
+__version__ = '0.9.0'
__author__ = 'ymoch'
__author_email__ = '[email protected]'
|
Update the version to <I>.
|
py
|
diff --git a/wsgiservice/resource.py b/wsgiservice/resource.py
index <HASH>..<HASH> 100644
--- a/wsgiservice/resource.py
+++ b/wsgiservice/resource.py
@@ -207,7 +207,7 @@ class Resource(object):
else:
self.response.vary.append('Accept')
types = [mime for ext, mime in self.EXTENSION_MAP]
- return self.request.accept.first_match(types)
+ return self.request.accept.best_match(types)
def handle_ignored_resources(self):
"""Ignore robots.txt and favicon.ico GET requests based on a list of
|
Replace first_match with best_match. first_match has been deprecated since WebOb <I>b1.
|
py
|
diff --git a/src/python/test/test_dxclient.py b/src/python/test/test_dxclient.py
index <HASH>..<HASH> 100755
--- a/src/python/test/test_dxclient.py
+++ b/src/python/test/test_dxclient.py
@@ -865,6 +865,7 @@ class TestDXClient(DXTestCase):
@unittest.skipUnless(testutil.TEST_RUN_JOBS, "Skipping test that would run jobs")
def test_dx_run_debug_on_all(self):
+ self.configure_ssh()
crash_applet = dxpy.api.applet_new(dict(name="crash",
runSpec={"code": "exit 5", "interpreter": "bash",
"execDepends": [{"name": "dx-toolkit"}]},
|
Need to configure ssh to run test with debug-on
|
py
|
diff --git a/__init__.py b/__init__.py
index <HASH>..<HASH> 100644
--- a/__init__.py
+++ b/__init__.py
@@ -15,6 +15,8 @@
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import sys
+from .__pkginfo__ import version as __version__
+
def run_pylint():
"""run pylint"""
from pylint.lint import Run
|
Add __version__ attribute to comply with PEP <I>. Closes issue #<I>.
|
py
|
diff --git a/plugins/ldap/server/__init__.py b/plugins/ldap/server/__init__.py
index <HASH>..<HASH> 100644
--- a/plugins/ldap/server/__init__.py
+++ b/plugins/ldap/server/__init__.py
@@ -159,11 +159,11 @@ def _ldapAuth(event):
conn.bind_s(attrs['distinguishedName'][0], password, ldap.AUTH_SIMPLE)
except ldap.LDAPError:
# Try other LDAP servers or fall back to core auth
- conn.unbind_s()
continue
+ finally:
+ conn.unbind_s()
user = _getLdapUser(attrs, server)
- conn.unbind_s()
if user:
event.stopPropagation().preventDefault().addResponse(user)
|
Use `finally` to close LDAP connection
|
py
|
diff --git a/Lib/fontmake/font_project.py b/Lib/fontmake/font_project.py
index <HASH>..<HASH> 100644
--- a/Lib/fontmake/font_project.py
+++ b/Lib/fontmake/font_project.py
@@ -919,7 +919,10 @@ class FontProject:
**kwargs,
)
except FontmakeError as e:
- e.source_trail.append(designspace.path)
+ # Some called functions already added the Designspace file to the source
+ # trail.
+ if e.source_trail[-1] != designspace.path:
+ e.source_trail.append(designspace.path)
raise
except Exception as e:
raise FontmakeError(
|
run_from_designspace: selectively add DS to source_trail
|
py
|
diff --git a/LiSE/core.py b/LiSE/core.py
index <HASH>..<HASH> 100644
--- a/LiSE/core.py
+++ b/LiSE/core.py
@@ -455,6 +455,9 @@ class Engine(object):
self.vonmisesvariate = self.rando.vonmisesvariate
self.weibullvariate = self.rando.weibullvariate
+ def coinflip(self):
+ return self.choice((True, False))
+
def commit(self):
"""Commit to both the world and code databases, and begin a new
transaction for the world database
|
for when you just want a True or False
|
py
|
diff --git a/webview/win32.py b/webview/win32.py
index <HASH>..<HASH> 100644
--- a/webview/win32.py
+++ b/webview/win32.py
@@ -159,9 +159,9 @@ class BrowserView(object):
atl_width = self.width - self.scrollbar_width
atl_height = self.height - self.scrollbar_height - VERTICAL_SCROLLBAR_OFFSET
- self.atlhwnd = win32gui.CreateWindow("AtlAxWin", self.url,
- win32con.WS_CHILD | win32con.WS_HSCROLL | win32con.WS_VSCROLL,
- 0, 0, atl_width, atl_height, self.hwnd, None, hInstance, None)
+ self.atlhwnd = win32gui.CreateWindow("AtlAxWin", "bogus-url",
+ win32con.WS_CHILD | win32con.WS_HSCROLL | win32con.WS_VSCROLL,
+ 0, 0, atl_width, atl_height, self.hwnd, None, hInstance, None)
# COM voodoo
pBrowserUnk = POINTER(IUnknown)()
@@ -181,6 +181,9 @@ class BrowserView(object):
win32gui.UpdateWindow(self.atlhwnd)
win32gui.SetFocus(self.atlhwnd)
+ # Load URL here instead in CreateWindow to prevent a dead-lock
+ self.browser.Navigate2(self.url)
+
# Start sending and receiving messages
win32gui.PumpMessages()
|
Change how URL is loaded on an initial window display in Windows
|
py
|
diff --git a/h2o-bindings/bin/gen_java.py b/h2o-bindings/bin/gen_java.py
index <HASH>..<HASH> 100755
--- a/h2o-bindings/bin/gen_java.py
+++ b/h2o-bindings/bin/gen_java.py
@@ -86,7 +86,7 @@ def generate_schema(class_name, schema):
if field["name"] == "can_build": is_model_builder = True
superclass = schema["superclass"]
- if superclass == "Iced": superclass = "Object"
+ if superclass == "Schema": superclass = "Object"
fields = []
for field in schema["fields"]:
|
In new schema hierarchy all schemas inherit from Schema, not from Iced
|
py
|
diff --git a/salt/utils/master.py b/salt/utils/master.py
index <HASH>..<HASH> 100755
--- a/salt/utils/master.py
+++ b/salt/utils/master.py
@@ -85,7 +85,7 @@ class MasterPillarUtil(object):
if not salt.utils.verify.valid_id(self.opts, minion_id):
continue
else:
- if not salt.utils.verify.valid_id(minion_id):
+ if not salt.utils.verify.valid_id(self.opts, minion_id):
continue
path = os.path.join(mdir, minion_id, 'data.p')
if os.path.isfile(path):
@@ -110,7 +110,7 @@ class MasterPillarUtil(object):
expr_form='list')
return ret
- def _get_live_minion_pillar(self, minion_id=None, minion_grains={}):
+ def _get_live_minion_pillar(self, minion_id=None, minion_grains=None):
# Returns a dict of pillar data for one minion
if minion_id == None:
return {}
|
Fix bad method call and bad default arg
|
py
|
diff --git a/thumbor/result_storages/file_storage.py b/thumbor/result_storages/file_storage.py
index <HASH>..<HASH> 100644
--- a/thumbor/result_storages/file_storage.py
+++ b/thumbor/result_storages/file_storage.py
@@ -57,7 +57,7 @@ class Storage(BaseStorage):
return abspath(path).startswith(self.context.config.RESULT_STORAGE_FILE_STORAGE_ROOT_PATH)
def normalize_path(self, path):
- if self.is_auto_webp:
+ if not self.is_auto_webp:
path = join(self.context.config.RESULT_STORAGE_FILE_STORAGE_ROOT_PATH.rstrip('/'), self.partition(path), path.lstrip('/'))
else:
path = join(self.context.config.RESULT_STORAGE_FILE_STORAGE_ROOT_PATH.rstrip('/'), "webp", self.partition(path), path.lstrip('/'))
|
Use webp in path only when webp is supported
|
py
|
diff --git a/tests/test_parse.py b/tests/test_parse.py
index <HASH>..<HASH> 100644
--- a/tests/test_parse.py
+++ b/tests/test_parse.py
@@ -22,7 +22,7 @@ class ParseTest(unittest.TestCase):
for torrent, expected_result in zip(torrents, expected_results):
result = PTN.parse(torrent)
- self.assertItemsEqual(result, expected_result)
+ self.assertEqual(result, expected_result)
if __name__ == '__main__':
|
Fix unit test. Earlier testing was done for keys rather than values
|
py
|
diff --git a/salt/state.py b/salt/state.py
index <HASH>..<HASH> 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -1265,8 +1265,8 @@ class State(object):
# We pass deep copies here because we don't want any misbehaving
# state module to change these at runtime.
'__low__': copy.deepcopy(low),
- '__running__': copy.deepcopy(running),
- '__lowstate__': copy.deepcopy(chunks)
+ '__running__': copy.deepcopy(running) if running else {},
+ '__lowstate__': copy.deepcopy(chunks) if chunks else {}
}
if low.get('__prereq__'):
|
Save CPU cycles on the deep copy calls.
|
py
|
diff --git a/loguru/__init__.py b/loguru/__init__.py
index <HASH>..<HASH> 100644
--- a/loguru/__init__.py
+++ b/loguru/__init__.py
@@ -82,11 +82,11 @@ class StrRecord(str):
class Handler:
- def __init__(self, *, writter, level, format_, filter, colored, better_exceptions):
+ def __init__(self, *, writter, level, format_, filter_, colored, better_exceptions):
self.writter = writter
self.level = level
self.format = format_
- self.filter = filter
+ self.filter = filter_
self.colored = colored
self.better_exceptions = better_exceptions
@@ -507,7 +507,7 @@ class Logger:
writter=writter,
level=level,
format_=format,
- filter=filter,
+ filter_=filter,
colored=colored,
better_exceptions=better_exceptions,
)
|
No longer shadow built-in "filter" in private functions
|
py
|
diff --git a/tests/unit/remote/ssh/test_connection.py b/tests/unit/remote/ssh/test_connection.py
index <HASH>..<HASH> 100644
--- a/tests/unit/remote/ssh/test_connection.py
+++ b/tests/unit/remote/ssh/test_connection.py
@@ -95,11 +95,19 @@ def test_symlink(repo_dir, ssh):
assert System.is_symlink("link")
[email protected](
+ platform.system() == "Windows",
+ reason="hardlink is temporarily not supported on Windows",
+)
def test_hardlink(repo_dir, ssh):
ssh.hardlink("foo", "link")
assert System.is_hardlink("link")
[email protected](
+ platform.system() == "Windows",
+ reason="copy is temporarily not supported on Windows",
+)
def test_copy(repo_dir, ssh):
ssh.copy("foo", "link")
assert filecmp.cmp("foo", "link")
|
tests: ignore ssh's hardlink and copy tests on windows
|
py
|
diff --git a/charmhelpers/contrib/openstack/context.py b/charmhelpers/contrib/openstack/context.py
index <HASH>..<HASH> 100644
--- a/charmhelpers/contrib/openstack/context.py
+++ b/charmhelpers/contrib/openstack/context.py
@@ -197,7 +197,7 @@ class SharedDBContext(OSContextGenerator):
if self.relation_prefix:
password_setting = self.relation_prefix + '_password'
- for rid in relation_ids('shared-db'):
+ for rid in relation_ids(self.interfaces[0]):
for unit in related_units(rid):
rdata = relation_get(rid=rid, unit=unit)
host = rdata.get('db_host')
|
[wolsen,r=] Change the SharedDBContext to iterate over relations of the first interface name for viable shared databases.
|
py
|
diff --git a/sailthru/sailthru_http.py b/sailthru/sailthru_http.py
index <HASH>..<HASH> 100644
--- a/sailthru/sailthru_http.py
+++ b/sailthru/sailthru_http.py
@@ -41,7 +41,7 @@ def sailthru_http_request(url, data, method):
response_data = response.read()
response.close()
return response_data
- except urllib2.URLError, e:
- return str(e)
except urllib2.HTTPError, e:
- return e.read()
\ No newline at end of file
+ return e.read()
+ except urllib2.URLError, e:
+ return str(e)
\ No newline at end of file
|
Must catch urllib2.HTTPError before urllib2.URLError, since URLError will also gobble up HTTPError, and then the response returns a generic <I> message instead of the intended response.
|
py
|
diff --git a/folium/plugins/timestamped_geo_json.py b/folium/plugins/timestamped_geo_json.py
index <HASH>..<HASH> 100644
--- a/folium/plugins/timestamped_geo_json.py
+++ b/folium/plugins/timestamped_geo_json.py
@@ -6,6 +6,7 @@ import json
from branca.element import CssLink, Figure, JavascriptLink, MacroElement
+from folium.folium import Map
from folium.utilities import iter_points, none_max, none_min
from jinja2 import Template
@@ -166,6 +167,9 @@ class TimestampedGeoJson(MacroElement):
self.options = json.dumps(options, sort_keys=True, indent=2)
def render(self, **kwargs):
+ assert isinstance(self._parent, Map), (
+ 'TimestampedGeoJson can only be added to a Map object.'
+ )
super(TimestampedGeoJson, self).render()
figure = self.get_root()
|
Update timestamped_geo_json.py
|
py
|
diff --git a/classify_imports.py b/classify_imports.py
index <HASH>..<HASH> 100644
--- a/classify_imports.py
+++ b/classify_imports.py
@@ -182,7 +182,7 @@ class Import:
@property
def module_base(self) -> str:
- return self.module.split('.')[0]
+ return self.module.partition('.')[0]
@cached_property
def key(self) -> ImportKey:
@@ -237,7 +237,7 @@ class ImportFrom:
@property
def module_base(self) -> str:
- return self.module.split('.')[0]
+ return self.module.partition('.')[0]
@cached_property
def key(self) -> ImportFromKey:
|
micro-optimization: use partition instead of split
|
py
|
diff --git a/cgroupspy/nodes.py b/cgroupspy/nodes.py
index <HASH>..<HASH> 100644
--- a/cgroupspy/nodes.py
+++ b/cgroupspy/nodes.py
@@ -153,6 +153,7 @@ class Node(object):
Delete a cgroup by name and detach it from this node.
Raises OSError if the cgroup is not empty.
"""
+ name = name.encode()
fp = os.path.join(self.full_path, name)
if os.path.exists(fp):
os.rmdir(fp)
|
fix error: can't mix strings and bytes in path components
|
py
|
diff --git a/dataproperty/converter/_core.py b/dataproperty/converter/_core.py
index <HASH>..<HASH> 100644
--- a/dataproperty/converter/_core.py
+++ b/dataproperty/converter/_core.py
@@ -82,13 +82,12 @@ class DateTimeConverter(ValueConverter):
import dateutil.parser
import pytz
- if not self._is_convert:
- return self._value
-
if isinstance(self._value, datetime.datetime):
self.__datetime = self._value
return self.__datetime
+ if not self._is_convert:
+ return self._value
try:
self.__datetime = dateutil.parser.parse(self._value)
except (AttributeError, ValueError):
|
Fix placement of an if statement
|
py
|
diff --git a/napalm/__init__.py b/napalm/__init__.py
index <HASH>..<HASH> 100644
--- a/napalm/__init__.py
+++ b/napalm/__init__.py
@@ -16,7 +16,7 @@ from eos import EOSDriver
from iosxr import IOSXRDriver
from junos import JunOSDriver
from fortios import FortiOSDriver
-from ibm import IBMDriver
+#from ibm import IBMDriver
def get_network_driver(vendor):
driver_mapping = {
@@ -27,7 +27,7 @@ def get_network_driver(vendor):
'JUNOS': JunOSDriver,
'JUNIPER': JunOSDriver,
'FORTIOS': FortiOSDriver,
- 'IBM': IBMDriver,
+# 'IBM': IBMDriver,
}
try:
return driver_mapping[vendor.upper()]
|
IBMDriver disabled because documentation and requirements are missing
|
py
|
diff --git a/plenum/server/consensus/consensus_shared_data.py b/plenum/server/consensus/consensus_shared_data.py
index <HASH>..<HASH> 100644
--- a/plenum/server/consensus/consensus_shared_data.py
+++ b/plenum/server/consensus/consensus_shared_data.py
@@ -68,7 +68,7 @@ class ConsensusSharedData:
self.prepared = [] # type: List[BatchID]
self._validators = None
self.quorums = None
- self.view_change_votes = ViewChangeVotesForView(self.quorums)
+ self.view_change_votes = ViewChangeVotesForView(Quorums(len(validators)))
# a list of validator node names ordered by rank (historical order of adding)
self.set_validators(validators)
self.low_watermark = 0
|
INDY-<I>: fix a problem with ViewChangeVotesForView creation
|
py
|
diff --git a/pyvisa-py/tcpip.py b/pyvisa-py/tcpip.py
index <HASH>..<HASH> 100644
--- a/pyvisa-py/tcpip.py
+++ b/pyvisa-py/tcpip.py
@@ -381,13 +381,20 @@ class TCPIPSocketSession(Session):
return (out + parts[0] + end_byte,
constants.StatusCode.success_termination_character_read)
+ # initial 'select_timout' is same as timeout, so when no data arrived then max block time
+ select_timout = timeout
while now - start <= timeout:
- # use select to wait for read ready
- select.select([self.interface], [], [])
- last = read_fun(chunk_length)
+ # use select to wait for read ready, max `select_timout` seconds
+ r, w, x = select.select([self.interface], [], [], select_timout)
+
+ last = b''
+ if self.interface in r:
+ last = read_fun(chunk_length)
if not last:
- time.sleep(.01)
+ # can't read chunk
+ # `select_timout` decreased to 0.01 sec
+ select_timout = 0.01
now = time.time()
continue
|
added timeout to select to unblock if no data arrived
|
py
|
diff --git a/cheroot/test/test_conn.py b/cheroot/test/test_conn.py
index <HASH>..<HASH> 100644
--- a/cheroot/test/test_conn.py
+++ b/cheroot/test/test_conn.py
@@ -1094,7 +1094,7 @@ class FaultyGetMap:
def __call__(self):
"""Intercept the calls to selector.get_map."""
sabotage_targets = (
- conn for _, (*_, conn) in self.original_get_map().items()
+ conn for _, (_, _, _, conn) in self.original_get_map().items()
if isinstance(conn, cheroot.server.HTTPConnection)
) if self.sabotage_conn else ()
|
Fix py2 tuple unpacking in gen expr @ test_conn
|
py
|
diff --git a/tests/test_client.py b/tests/test_client.py
index <HASH>..<HASH> 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -98,10 +98,8 @@ def test_client_broken_cert_bundle(monkeypatch):
Under circumstances it could be possible that the certifi package is not correctly installed, broken,
or just plain too old. Connecting to the Mollie API should fail with an error when the certificate
cannot be verified.
-
- We monkeypatch requests with a non-existent path at the location where certifi normally sets the correct path.
"""
- monkeypatch.setattr(requests.adapters, 'DEFAULT_CA_BUNDLE_PATH', '/does/not/exist')
+ monkeypatch.setenv("REQUESTS_CA_BUNDLE", "/does/not/exist")
client = Client()
client.set_api_key('test_test')
|
Don't mess with 3rd-party internals when it's not needed. The "REQUESTS_CA_BUNDLE" envvar is a documented feature of requests.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ setup(
'cc_core.commons.connectors',
'cc_core.agent',
'cc_core.agent.cwl',
- 'cc_core.agent.cwl_io',
+ 'cc_core.agent.red',
'cc_core.agent.connected'
],
entry_points={
|
fixed package renaming error in setup.py
|
py
|
diff --git a/field_history/tracker.py b/field_history/tracker.py
index <HASH>..<HASH> 100644
--- a/field_history/tracker.py
+++ b/field_history/tracker.py
@@ -4,11 +4,14 @@ from copy import deepcopy
import threading
from django.core import serializers
+from django.conf import settings
from django.db import models
from django.utils.functional import curry
from .models import FieldHistory
+SERIALIZER_NAME = getattr(settings, 'FIELD_HISTORY_SERIALIZER_NAME', 'json')
+
class FieldInstanceTracker(object):
def __init__(self, instance, fields):
@@ -92,7 +95,7 @@ class FieldHistoryTracker(object):
# Create a FieldHistory for all self.fields that have changed
for field in self.fields:
if tracker.has_changed(field) or is_new_object:
- data = serializers.serialize('json',
+ data = serializers.serialize(SERIALIZER_NAME,
[instance],
fields=[field])
user = self.get_field_history_user(instance)
|
ability to change serializer name in settings: FIELD_HISTORY_SERIALIZER_NAME
|
py
|
diff --git a/sinchsms.py b/sinchsms.py
index <HASH>..<HASH> 100644
--- a/sinchsms.py
+++ b/sinchsms.py
@@ -33,8 +33,8 @@ class SinchSMS(object):
Sends a get request if values are None, post request otherwise.
"""
if values:
- jsonData = json.dumps(values)
- request = urllib2.Request(url, jsonData.encode())
+ json_data = json.dumps(values)
+ request = urllib2.Request(url, json_data.encode())
request.add_header('content-type', 'application/json')
request.add_header('authorization', self._auth)
connection = urllib2.urlopen(request)
|
Fixed pylint warning.
|
py
|
diff --git a/pymongo/collection.py b/pymongo/collection.py
index <HASH>..<HASH> 100644
--- a/pymongo/collection.py
+++ b/pymongo/collection.py
@@ -1120,9 +1120,12 @@ class Collection(common.BaseObject):
.. versionadded:: 1.10
"""
+ use_master = not self.slave_okay and not self.read_preference
+
response = self.__database.command("mapreduce", self.__name,
uuid_subtype=self.__uuid_subtype,
map=map, reduce=reduce,
+ _use_master=use_master,
out={"inline": 1}, **kwargs)
if full_response:
|
PYTHON-<I>: Adding support for ReadPreference on ReplicaSetConnections for Collection.inline_map_reduce.
|
py
|
diff --git a/autoflake.py b/autoflake.py
index <HASH>..<HASH> 100755
--- a/autoflake.py
+++ b/autoflake.py
@@ -41,7 +41,7 @@ import pyflakes.messages
import pyflakes.reporter
-__version__ = '0.6.1'
+__version__ = '0.6.2'
ATOMS = frozenset([tokenize.NAME, tokenize.NUMBER, tokenize.STRING])
|
Increment patch version to <I>
|
py
|
diff --git a/machina/apps/forum/abstract_models.py b/machina/apps/forum/abstract_models.py
index <HASH>..<HASH> 100644
--- a/machina/apps/forum/abstract_models.py
+++ b/machina/apps/forum/abstract_models.py
@@ -54,7 +54,8 @@ class AbstractForum(MPTTModel, ActiveModel):
default=False)
# Category, Default forum or Link ; that's what a forum can be
- type = models.PositiveSmallIntegerField(choices=FORUM_TYPES, verbose_name=_('Forum type'), db_index=True)
+ TYPE_CHOICES = FORUM_TYPES
+ type = models.PositiveSmallIntegerField(choices=TYPE_CHOICES, verbose_name=_('Forum type'), db_index=True)
# Tracking data
posts_count = models.PositiveIntegerField(verbose_name=_('Number of posts'), editable=False, blank=True, default=0)
|
Forum type choices are now embedded in the model
|
py
|
diff --git a/trunk/languagetool/query.py b/trunk/languagetool/query.py
index <HASH>..<HASH> 100644
--- a/trunk/languagetool/query.py
+++ b/trunk/languagetool/query.py
@@ -151,20 +151,28 @@ def buildList(filename):
def queryFiles(tokens, dir_name):
os.chdir(dir_name)
dir_contents = os.listdir(".")
+ dir_contents.sort()
+ c = 0
+ for filename in dir_contents:
+ if filename.endswith(".xml"):
+ c = c + 1
+ print "Found %d *.xml files in %s<br>" % (c, dir_name)
w = 0
s = 0
m = 0
+ f_count = 1
for name in dir_contents:
if os.path.isdir(name):
queryFiles(tokens, name)
elif name.endswith(".xml"):
- print "<strong>%s</strong>, so far %d words, %d sentences<br>" % (name, word_count, sentence_count)
+ print "<strong>%.3d. %s</strong>, so far %d words, %d sentences<br>" % (f_count, name, word_count, sentence_count)
res = query(tokens, name)
if not res:
return
#global_file_count = global_file_count + 1
#print "<hr />"
sys.stdout.flush()
+ f_count = f_count + 1
# for profiling
#if word_count > 200000:
# return
|
sort filenames, display file number
|
py
|
diff --git a/plugins/bigquery/dbt/adapters/bigquery/impl.py b/plugins/bigquery/dbt/adapters/bigquery/impl.py
index <HASH>..<HASH> 100644
--- a/plugins/bigquery/dbt/adapters/bigquery/impl.py
+++ b/plugins/bigquery/dbt/adapters/bigquery/impl.py
@@ -519,13 +519,13 @@ class BigQueryAdapter(BaseAdapter):
table_field = table.time_partitioning.field
return table_field == conf_partition.field
elif conf_partition and table.range_partitioning is not None:
- dest_part = table.range_partition.range_
+ dest_part = table.range_partitioning
conf_part = conf_partition.range or {}
return dest_part.field == conf_partition.field \
- and dest_part.start == conf_part.get('start') \
- and dest_part.end == conf_part.get('end') \
- and dest_part.interval == conf_part.get('interval')
+ and dest_part.range_.start == conf_part.get('start') \
+ and dest_part.range_.end == conf_part.get('end') \
+ and dest_part.range_.interval == conf_part.get('interval')
else:
return False
|
Fixup range_partitioning comparison logic
|
py
|
diff --git a/graphenecommon/genesisbalance.py b/graphenecommon/genesisbalance.py
index <HASH>..<HASH> 100644
--- a/graphenecommon/genesisbalance.py
+++ b/graphenecommon/genesisbalance.py
@@ -51,7 +51,7 @@ class GenesisBalance(BlockchainObject, AbstractBlockchainInstanceProvider):
account = self.blockchain.config["default_account"]
if not account:
raise ValueError("You need to provide an account")
- account = self.account_class(account)
+ account = self.account_class(account, blockchain_instance=self.blockchain)
pubkeys = self.blockchain.wallet.getPublicKeys()
addresses = dict()
for p in pubkeys:
|
Pass blockchain_instance. Without passing a proper instance, the account got taken from a new instance which may connect to a different network.
|
py
|
diff --git a/tweepy/api.py b/tweepy/api.py
index <HASH>..<HASH> 100644
--- a/tweepy/api.py
+++ b/tweepy/api.py
@@ -1452,14 +1452,24 @@ class API:
``user_id`` and ``screen_name`` are not provided, the ownerships for
the authenticating user are returned.
- :param user_id: |user_id|
- :param screen_name: |screen_name|
- :param count: |count|
- :param cursor: |cursor|
+ Parameters
+ ----------
+ user_id
+ |user_id|
+ screen_name
+ |screen_name|
+ count
+ |count|
+ cursor
+ |cursor|
- :rtype: list of :class:`List` objects
+ Returns
+ -------
+ :py:class:`List`\ [:class:`~tweepy.models.List`]
- :reference: https://developer.twitter.com/en/docs/twitter-api/v1/accounts-and-users/create-manage-lists/api-reference/get-lists-ownerships
+ References
+ ----------
+ https://developer.twitter.com/en/docs/twitter-api/v1/accounts-and-users/create-manage-lists/api-reference/get-lists-ownerships
"""
return self.request(
'GET', 'lists/ownerships', endpoint_parameters=(
|
Update and improve documentation for API.lists_ownerships
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ finally:
setup(
name='django-fluent-comments',
- version='0.1.0',
+ version='0.8.0',
license='Apache License, Version 2.0',
install_requires=[
|
Set <I> version, like other django-fluent apps
|
py
|
diff --git a/lhc-python/lhc/binf/kmer.py b/lhc-python/lhc/binf/kmer.py
index <HASH>..<HASH> 100644
--- a/lhc-python/lhc/binf/kmer.py
+++ b/lhc-python/lhc/binf/kmer.py
@@ -1,18 +1,23 @@
+from collections import Counter
+
+
class KmerCounter(object):
def __init__(self, sequence, k=None, step=1):
- self.sequence = sequence
- self.counts = {}
+ self.sequence = sequence.lower()
+ self.counts = Counter()
self.step = step
+ self.ks = set()
if k is not None:
+ self.ks.add(k)
self._count_k(k)
def __str__(self):
return str(self.counts)
def __getitem__(self, key):
- if key not in self.counts:
+ if len(key) not in self.ks:
self._count_k(len(key))
- return self.counts[key]
+ return self.counts[key.lower()]
def _count_k(self, k):
for i in xrange(0, len(self.sequence), self.step):
|
improved kmer to work with no counts
|
py
|
diff --git a/thefuck/rules/cargo_no_command.py b/thefuck/rules/cargo_no_command.py
index <HASH>..<HASH> 100644
--- a/thefuck/rules/cargo_no_command.py
+++ b/thefuck/rules/cargo_no_command.py
@@ -4,7 +4,7 @@ from thefuck.utils import replace_argument, for_app
@for_app('cargo', at_least=1)
def match(command):
- return ('o such subcommand' in command.stderr
+ return ('no such subcommand' in command.stderr.lower()
and 'Did you mean' in command.stderr)
|
#<I>: Little refactoring
|
py
|
diff --git a/doc/src/docbkx/scripts/generateReleaseNotes.py b/doc/src/docbkx/scripts/generateReleaseNotes.py
index <HASH>..<HASH> 100755
--- a/doc/src/docbkx/scripts/generateReleaseNotes.py
+++ b/doc/src/docbkx/scripts/generateReleaseNotes.py
@@ -6,7 +6,7 @@ import os
import argparse
import time
import collections
-sys.path.append('../../../../tools/PyGithub')
+sys.path.append('./lib/PyGithub')
import config
from github import Github
|
Updated Release Notes script to look at new path of PyGithub
|
py
|
diff --git a/src/python/dxpy/__init__.py b/src/python/dxpy/__init__.py
index <HASH>..<HASH> 100644
--- a/src/python/dxpy/__init__.py
+++ b/src/python/dxpy/__init__.py
@@ -609,6 +609,9 @@ def DXHTTPRequest(resource, data, method='POST', headers=None, auth=True,
return i
_headers = {ensure_ascii(k): ensure_ascii(v) for k, v in _headers.items()}
+ if (sys.version_info >= (3, 0)):
+ _headers.pop(b'host', None)
+ _headers.pop(b'content-length', None)
response = pool_manager.request(_method, _url, headers=_headers, body=body,
timeout=timeout, retries=False, **kwargs)
except urllib3.exceptions.ClosedPoolError:
|
DEVEX-<I> - Fix python 3 dx upload issue (#<I>)
|
py
|
diff --git a/spyder/plugins/editor/widgets/codeeditor.py b/spyder/plugins/editor/widgets/codeeditor.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/widgets/codeeditor.py
+++ b/spyder/plugins/editor/widgets/codeeditor.py
@@ -2073,6 +2073,7 @@ class CodeEditor(TextEditBaseWidget):
if data and data.code_analysis:
for warning in data.code_analysis:
warnings.append([warning[-1], block.blockNumber() + 1])
+ # See spyder-ide/spyder#9924
if block.blockNumber() + 1 == line_count:
break
block = block.next()
|
Update spyder/plugins/editor/widgets/codeeditor.py
|
py
|
diff --git a/smartmin/views.py b/smartmin/views.py
index <HASH>..<HASH> 100644
--- a/smartmin/views.py
+++ b/smartmin/views.py
@@ -46,7 +46,6 @@ class SmartView(object):
exclude = None
field_config = {}
title = None
- permission = None
refresh = 0
template_name = None
@@ -82,7 +81,7 @@ class SmartView(object):
self.args = args
self.request = request
- if not self.permission:
+ if not getattr(self, 'permission', None):
return True
else:
# first check our anonymous permissions
@@ -118,7 +117,7 @@ class SmartView(object):
if obj_getter:
obj = obj_getter()
if obj:
- return self.request.user.has_perm(self.permission, obj)
+ return self.request.user.has_perm(getattr(self, 'permission', None), obj)
def dispatch(self, request, *args, **kwargs):
"""
|
Handled the permission for actions inheritance issue
|
py
|
diff --git a/source/awesome_tool/mvc/controllers/graphical_editor.py b/source/awesome_tool/mvc/controllers/graphical_editor.py
index <HASH>..<HASH> 100644
--- a/source/awesome_tool/mvc/controllers/graphical_editor.py
+++ b/source/awesome_tool/mvc/controllers/graphical_editor.py
@@ -685,6 +685,10 @@ class GraphicalEditorController(Controller):
state.meta['gui']['editor']['input_pos'].items() +
state.meta['gui']['editor']['output_pos'].items() +
state.meta['gui']['editor']['scoped_pos'].items())
+
+ if not from_key in connectors or not to_key in connectors:
+ logger.warn("Data flow with non existing port(s): {0}, {1}".format(from_key, to_key))
+ continue
from_x = connectors[from_key][0]
from_y = connectors[from_key][1]
to_x = connectors[to_key][0]
|
Make graphical editor more fail-safe. The graphical editor controller now checks whether the ports of a data flow exist. If not, a warning is logged and the data flow is skipped. However, there are still errors in other components when removing a port to which a data flow is connected!
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -28,4 +28,7 @@ setup(
'docs': ['Sphinx']
},
test_suite='aja.tests',
+ entry_points = {
+ 'console_scripts': ['aja = fabric.main:main']
+ }
)
|
Add aja-alias for fabric as a console script
|
py
|
diff --git a/filer/admin/fileadmin.py b/filer/admin/fileadmin.py
index <HASH>..<HASH> 100644
--- a/filer/admin/fileadmin.py
+++ b/filer/admin/fileadmin.py
@@ -41,7 +41,7 @@ class FileAdmin(PrimitivePermissionAwareModelAdmin):
'fields': ('is_public',)
}),
(_('Advanced'), {
- 'fields': ('_file',),
+ 'fields': ('file',),
'classes': ('collapse',),
}),
)
|
Update the admin with the latest attribute name changes
|
py
|
diff --git a/fsoopify/nodes.py b/fsoopify/nodes.py
index <HASH>..<HASH> 100644
--- a/fsoopify/nodes.py
+++ b/fsoopify/nodes.py
@@ -46,8 +46,8 @@ class NodeInfo(ABC):
'''
if not isinstance(dest_path, str):
raise TypeError
- os.rename(self._path, dest_path)
- self._path = Path(dest_path)
+ os.rename(self._path, new_path)
+ self._path = Path(dest_path).get_abspath()
def get_parent(self):
'''
|
fixes path not being absolute after rename
|
py
|
diff --git a/tests/test_rdd.py b/tests/test_rdd.py
index <HASH>..<HASH> 100644
--- a/tests/test_rdd.py
+++ b/tests/test_rdd.py
@@ -32,7 +32,7 @@ dpark_master = os.environ.get("TEST_DPARK_MASTER", "local")
# export TEST_DPARK_MASTER=mesos
# export TMPDIR=/path/on/moosefs
-env_msg = "test with dpark_master={}, tempdir={}, PYTHONPATH={}".format(
+env_msg = "test with TEST_DPARK_MASTER={}, TMPDIR={}, PYTHONPATH={}".format(
dpark_master, tempfile.gettempdir(), os.environ.get("PYTHONPATH")
)
print(env_msg)
|
Polish env info output of test.
|
py
|
diff --git a/maildir_deduplicate/deduplicate.py b/maildir_deduplicate/deduplicate.py
index <HASH>..<HASH> 100644
--- a/maildir_deduplicate/deduplicate.py
+++ b/maildir_deduplicate/deduplicate.py
@@ -62,7 +62,7 @@ class Deduplicate(object):
def add_maildir(self, maildir_path):
""" Load up a maildir add compute hash for each mail their contain. """
- maildir = Maildir(maildir_path, factory=None)
+ maildir = Maildir(maildir_path, create=False)
# Collate folders by hash.
print("Processing {} mails in {}".format(len(maildir), maildir._path))
for mail_id, message in maildir.iteritems():
|
Input maildirs are read-only.
|
py
|
diff --git a/tests/column_test.py b/tests/column_test.py
index <HASH>..<HASH> 100644
--- a/tests/column_test.py
+++ b/tests/column_test.py
@@ -64,3 +64,12 @@ def test_plain_strings():
assert len(df[1:3]) == 2
assert df[1:3].x.tolist() == x[1:3].tolist()
+
+def test_dtype_object_with_arrays():
+ x = np.arange(10)
+ y = np.arange(11) ** 2
+ z = np.array([x, y])
+ assert z.dtype == np.object
+ df = vaex.from_arrays(z=z)
+ assert df.z.tolist()[0].tolist() == x.tolist()
+ assert df.z.tolist()[1].tolist() == y.tolist()
|
test: dtype object with numpy arrays Was missing from #<I>
|
py
|
diff --git a/mtools/mplotqueries/plottypes/connchurn_type.py b/mtools/mplotqueries/plottypes/connchurn_type.py
index <HASH>..<HASH> 100644
--- a/mtools/mplotqueries/plottypes/connchurn_type.py
+++ b/mtools/mplotqueries/plottypes/connchurn_type.py
@@ -93,7 +93,13 @@ class ConnectionChurnPlotType(BasePlotType):
total = sorted(opened+closed, key=lambda ll: ll.datetime)
x = date2num( [ logline.datetime for logline in total ] )
- conns = [int(re.search(r'(\d+) connections? now open', ll.line_str).group(1)) for ll in total]
+
+ try:
+ conns = [int(re.search(r'(\d+) connections? now open', ll.line_str).group(1)) for ll in total]
+ except AttributeError:
+ # hack, v2.0.x doesn't have this information
+ axis.set_ylim(top = self.ymax*1.1)
+ return
axis.plot(x, conns, '-', color='black', linewidth=2, alpha=0.7, label='# open connections total')
|
fix for <I> log files that don't have the total number of connections.
|
py
|
diff --git a/icekit/api/urls.py b/icekit/api/urls.py
index <HASH>..<HASH> 100644
--- a/icekit/api/urls.py
+++ b/icekit/api/urls.py
@@ -1,5 +1,7 @@
+import logging
+
from django.conf import settings
-from django.conf.urls import url, include, patterns
+from django.conf.urls import url, include
from django.utils.module_loading import import_string
from rest_framework import routers
@@ -8,6 +10,8 @@ from rest_framework_swagger.views import get_swagger_view
from .images import views as images_views
from .pages import views as pages_views
+logger = logging.getLogger(__name__)
+
schema_doc_view = get_swagger_view(title='GLAMkit API')
@@ -20,7 +24,13 @@ router.register(r'pages', pages_views.PageViewSet, 'pages-api')
for api_section_name, pluggable_router \
in getattr(settings, 'EXTRA_API_ROUTERS', []):
if isinstance(pluggable_router, basestring):
- pluggable_router = import_string(pluggable_router)
+ try:
+ pluggable_router = import_string(pluggable_router)
+ except ImportError, ex:
+ logger.warn(
+ "Failed to load API router '%s' from EXTRA_API_ROUTERS: %s"
+ % (pluggable_router, ex))
+ continue
for prefix, viewset, basename in pluggable_router.registry:
if api_section_name:
prefix = api_section_name + prefix
|
#<I> Log errors instead of failing when loading extra API routers
|
py
|
diff --git a/h2o-py/h2o/h2o.py b/h2o-py/h2o/h2o.py
index <HASH>..<HASH> 100644
--- a/h2o-py/h2o/h2o.py
+++ b/h2o-py/h2o/h2o.py
@@ -354,7 +354,7 @@ def download_csv(data, filename):
"""
data._eager()
if not isinstance(data, H2OFrame): raise(ValueError, "`data` argument must be an H2OFrame, but got " + type(data))
- url = "http://{}:{}/3/DownloadDataset?frame_id={}".format(H2OConnection.ip(),H2OConnection.port(),data._id)
+ url = "http://{}:{}/3/DownloadDataset.bin?frame_id={}".format(H2OConnection.ip(),H2OConnection.port(),data._id)
with open(filename, 'w') as f: f.write(urllib2.urlopen(url).read())
def download_all_logs(dirname=".",filename=None):
|
update endpoint for python client download_csv
|
py
|
diff --git a/python/ray/rllib/eval.py b/python/ray/rllib/eval.py
index <HASH>..<HASH> 100644
--- a/python/ray/rllib/eval.py
+++ b/python/ray/rllib/eval.py
@@ -7,6 +7,7 @@ from __future__ import print_function
import argparse
import gym
import json
+import os
import ray
from ray.rllib.agent import get_agent_class
@@ -42,11 +43,19 @@ parser.add_argument(
help="Run evaluation of the agent forever.")
parser.add_argument(
"--config", default="{}", type=json.loads,
- help="Algorithm-specific configuration (e.g. env, hyperparams), ")
+ help="Algorithm-specific configuration (e.g. env, hyperparams). "
+ "Surpresses loading of configuration from checkpoint.")
if __name__ == "__main__":
args = parser.parse_args()
+ if not args.config:
+ # Load configuration from file
+ config_dir = os.path.dirname(args.checkpoint)
+ config_path = os.path.join(config_dir, "params.json")
+ with open(config_path) as f:
+ args.config = json.load(f)
+
if not args.env:
if not args.config.get("env"):
parser.error("the following arguments are required: --env")
|
Load evaluation configuration from checkpoint (#<I>)
|
py
|
diff --git a/gutenberg/acquire/metadata.py b/gutenberg/acquire/metadata.py
index <HASH>..<HASH> 100644
--- a/gutenberg/acquire/metadata.py
+++ b/gutenberg/acquire/metadata.py
@@ -39,12 +39,11 @@ class MetadataCacheManager(object):
if store == 'Sleepycat':
self.store = store
self.removable = True
- else:
- if cache_uri.startswith('sqlite://'):
- self.removable = True
- else:
- self.removable = False
+ elif cache_uri.startswith('sqlite://'):
+ self.removable = True
self.store = plugin.get(store, Store)(identifier=self.identifier)
+ else:
+ raise NotImplementedError
self.cache_uri = cache_uri
self.graph = Graph(store=self.store, identifier=self.identifier)
|
Make number of metadata stores explicit. Really there are only two supported stores: Sleepycat/BDB and Sqlite.
|
py
|
diff --git a/geomdl/BSpline.py b/geomdl/BSpline.py
index <HASH>..<HASH> 100644
--- a/geomdl/BSpline.py
+++ b/geomdl/BSpline.py
@@ -1324,7 +1324,7 @@ class Surface(object):
def delta(self):
""" Surface evaluation delta.
- .. note:: The delta value is 0.01 by default.
+ .. note:: The delta value is 0.1 by default.
:getter: Gets the delta value
:setter: Sets the delta value
|
Update inline docs for surface delta
|
py
|
diff --git a/splunk_handler/__init__.py b/splunk_handler/__init__.py
index <HASH>..<HASH> 100644
--- a/splunk_handler/__init__.py
+++ b/splunk_handler/__init__.py
@@ -249,6 +249,7 @@ class SplunkHandler(logging.Handler):
if not payload:
payload = self.log_payload
+ self.log_payload = ""
if payload:
self.write_debug_log("Payload available for sending")
@@ -274,7 +275,6 @@ class SplunkHandler(logging.Handler):
self.write_debug_log("Exception encountered," +
"but traceback could not be formatted")
- self.log_payload = ""
else:
self.write_debug_log("Timer thread executed but no payload was available to send")
@@ -338,14 +338,9 @@ class SplunkHandler(logging.Handler):
def wait_until_empty(self):
self.write_debug_log("Waiting until queue empty")
- flush_interval = self.flush_interval
- self.flush_interval = .5
-
while len(self.queue) > 0:
self.write_debug_log("Current queue size: " + str(len(self.queue)))
- time.sleep(.5)
-
- self.flush_interval = flush_interval
+ time.sleep(self.alt_flush_interval)
@property
def alt_flush_interval(self):
|
fix dup log race condition on shutdown
|
py
|
diff --git a/pytestsalt/utils.py b/pytestsalt/utils.py
index <HASH>..<HASH> 100644
--- a/pytestsalt/utils.py
+++ b/pytestsalt/utils.py
@@ -30,7 +30,6 @@ from collections import namedtuple
# Import 3rd party libs
import pytest
import psutil
-from tornado import gen, ioloop
# Import salt libs
import salt.ext.six as six
@@ -422,10 +421,6 @@ class SaltDaemonScriptBase(SaltScriptBase):
terminate_child_processes(children=children)
def wait_until_running(self, timeout=None):
- return ioloop.IOLoop.current().run_sync(lambda: self._wait_until_running(timeout=timeout))
-
- @gen.coroutine
- def _wait_until_running(self, timeout=None):
'''
Blocking call to wait for the daemon to start listening
'''
|
We don't need to rely on ioloop
|
py
|
diff --git a/lib/yaml/emitter.py b/lib/yaml/emitter.py
index <HASH>..<HASH> 100644
--- a/lib/yaml/emitter.py
+++ b/lib/yaml/emitter.py
@@ -581,7 +581,9 @@ class Emitter(object):
return tag
handle = None
suffix = tag
- for prefix in self.tag_prefixes:
+ prefixes = self.tag_prefixes.keys()
+ prefixes.sort()
+ for prefix in prefixes:
if tag.startswith(prefix) \
and (prefix == u'!' or len(prefix) < len(tag)):
handle = self.tag_prefixes[prefix]
|
Added a workaround against #<I> (Thanks Andrey Somov).
|
py
|
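The emitter workaround above only changes the iteration order over tag_prefixes; a toy illustration with an invented prefix map shows how sorting the keys makes the winning prefix (the last match, since the loop does not break) deterministic:

# Invented prefix map for illustration; not PyYAML's internal state.
tag_prefixes = {u'tag:example.com,2002:': u'!ex!', u'tag:example.com,2002:types/': u'!t!'}
tag = u'tag:example.com,2002:types/point'

handle = suffix = None
for prefix in sorted(tag_prefixes):  # deterministic order regardless of dict insertion
    if tag.startswith(prefix) and (prefix == u'!' or len(prefix) < len(tag)):
        handle = tag_prefixes[prefix]
        suffix = tag[len(prefix):]

print(handle, suffix)  # !t! point
|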
diff --git a/client/deis.py b/client/deis.py
index <HASH>..<HASH> 100755
--- a/client/deis.py
+++ b/client/deis.py
@@ -377,10 +377,6 @@ class DeisClient(object):
'No active controller. Use `deis login` or `deis register` to get started.')
url = urlparse.urljoin(controller, path, **kwargs)
response = func(url, data=body, headers=headers)
- # check for errors
- if response.json().get('error') is not None:
- print(response.json()['error'])
- sys.exit(1)
return response
def apps(self, args):
|
fix(client): remove check for error message. This check was originally used for API/client version mismatches, but breaks when the response is not as expected. The client response works anyway: ><> deis apps <I> METHOD NOT ALLOWED {u'error': u'Client and server versions do not match.\nClient version: <I>\nServer version: <I>'}
|
py
|
diff --git a/tests/test_plan.py b/tests/test_plan.py
index <HASH>..<HASH> 100644
--- a/tests/test_plan.py
+++ b/tests/test_plan.py
@@ -3,7 +3,7 @@
import unittest
import sys
import os
-lib_fldr = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + "AI" + os.sep + "lib")
+lib_fldr = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + os.sep + ".." + os.sep + "aikif" + os.sep + "lib")
#print(lib_fldr) # C:\DATA\Duncan\git\AIKIF\AI\lib (on laptop)
sys.path.append(lib_fldr)
from cls_plan import Plan
|
fixed test_plan to use corrected path
|
py
|
diff --git a/pyaxiom/netcdf/dataset.py b/pyaxiom/netcdf/dataset.py
index <HASH>..<HASH> 100644
--- a/pyaxiom/netcdf/dataset.py
+++ b/pyaxiom/netcdf/dataset.py
@@ -24,3 +24,10 @@ class EnhancedDataset(Dataset):
vs.append(self.variables[vname])
return vs
+
+ def close(self):
+ try:
+ self.sync()
+ self.close()
+ except RuntimeError:
+ pass
|
Add a close method to EnhancedDataset that won't raise a RuntimeError
|
py
|
diff --git a/balsa/balsa.py b/balsa/balsa.py
index <HASH>..<HASH> 100644
--- a/balsa/balsa.py
+++ b/balsa/balsa.py
@@ -94,6 +94,8 @@ class Balsa(object):
use_sentry_django = attrib(default=False)
use_sentry_lambda = attrib(default=False)
use_sentry_sqlalchemy = attrib(default=False)
+ use_sentry_celery = attrib(default=False)
+
sentry_client = attrib(default=None)
sentry_dsn = attrib(default=None)
@@ -223,6 +225,9 @@ class Balsa(object):
if self.use_sentry_sqlalchemy:
from sentry_sdk.integrations.sqlalchemy import SqlalchemyIntegration
integrations.append(SqlalchemyIntegration())
+ if self.use_sentry_celery:
+ from sentry_sdk.integrations.celery import CeleryIntegration
+ integrations.append(CeleryIntegration())
if self.sentry_dsn is None:
if 'SENTRY_DSN' not in os.environ:
|
Add celery to sentry integrations
|
py
|
diff --git a/src/requirementslib/models/dependency.py b/src/requirementslib/models/dependency.py
index <HASH>..<HASH> 100644
--- a/src/requirementslib/models/dependency.py
+++ b/src/requirementslib/models/dependency.py
@@ -324,13 +324,14 @@ class DependencyResolver(object):
for round_ in range(max_rounds):
self.pin_deps()
self.pin_history[round_] = self.pinned_deps.copy()
- previous_round = self.pin_history[round_ - 1]
- difference = set(self.pin_history[round_]) - set(previous_round)
- if difference:
- log("Difference: ")
- for d in difference:
- log(format_requirement(d))
- if not difference and round >= 3:
+ if round_ > 0:
+ previous_round = self.pin_history[round_ - 1]
+ difference = set(self.pin_history[round_]) - set(previous_round)
+ if difference:
+ log("Difference: ")
+ for d in difference:
+ log(format_requirement(d))
+ if round >= 3 and not difference:
return
if len(self.pinned_deps.keys()) == len(self.dep_dict.keys()):
return
|
Only check for difference in history after round 1
|
py
|
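The resolver fix above guards the history diff so the first round never indexes a round that does not exist yet. A compact sketch of the same guard with a hypothetical pin history:

pin_history = {}

def record_round(round_, pins):
    pin_history[round_] = set(pins)
    if round_ > 0:  # only diff once a previous round exists
        difference = pin_history[round_] - pin_history[round_ - 1]
        if difference:
            print("Difference:", sorted(difference))

record_round(0, {"requests==2.31.0"})
record_round(1, {"requests==2.31.0", "idna==3.7"})  # prints only the new pin
|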
diff --git a/yabt/caching_random_test.py b/yabt/caching_random_test.py
index <HASH>..<HASH> 100644
--- a/yabt/caching_random_test.py
+++ b/yabt/caching_random_test.py
@@ -350,7 +350,7 @@ def get_random_artifacts_to_delete(project: ProjectContext):
target = summary['name'].strip(':')
for type_name, artifact_list in artifact_desc.items():
for artifact in artifact_list:
- if artifact in artifacts_to_delete:
+ if artifact['hash'] in artifacts_to_delete:
if target not in project.targets:
artifacts_to_delete.remove(artifact)
elif summary['created'] == \
|
check if the artifact hash is in artifacts to delete
|
py
|
diff --git a/metanl/wordlist.py b/metanl/wordlist.py
index <HASH>..<HASH> 100644
--- a/metanl/wordlist.py
+++ b/metanl/wordlist.py
@@ -83,10 +83,10 @@ class Wordlist(object):
if filename in CACHE:
return CACHE[filename]
else:
- stream = pkg_resources.resource_stream(
+ stream = pkg_resources.resource_string(
__name__,
'data/wordlists/%s' % filename
- )
+ ).decode('utf-8').splitlines()
wordlist = cls._load_stream(stream)
CACHE[filename] = wordlist
return wordlist
@@ -96,9 +96,7 @@ class Wordlist(object):
worddict = {}
for line in stream:
word, freq = line.rstrip().split(',')
- freq = float(freq)
- word = word.decode('utf-8')
- worddict[word] = freq
+ worddict[word] = float(freq)
return cls(worddict)
@classmethod
|
Decode the wordlist all at once, for faster loading.
|
py
|