diff (string, 139-3.65k chars) | message (string, 8-627 chars) | diff_languages (string, 1 class: "py")
---|---|---|
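Each record below pairs a unified git diff with its commit message and the diff language (always Python in this dump). As a minimal sketch of how rows with this schema could be consumed — the dataset identifier used here is a hypothetical placeholder, not something given above — the Hugging Face `datasets` library could iterate them like this:

```python
# Minimal sketch: "example-org/py-commit-diffs" is a hypothetical placeholder;
# only the column names (diff, message, diff_languages) and the rough string
# lengths come from the schema header above.
from datasets import load_dataset

ds = load_dataset("example-org/py-commit-diffs", split="train")
for row in ds.select(range(3)):
    print(row["diff_languages"])     # single class, "py"
    print(row["message"])            # commit message, roughly 8-627 chars
    print(row["diff"][:200], "...")  # unified diff text, roughly 139-3.65k chars
```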
diff --git a/build/build_package.py b/build/build_package.py
index <HASH>..<HASH> 100755
--- a/build/build_package.py
+++ b/build/build_package.py
@@ -125,7 +125,7 @@ class Magento_Packager(object):
cdir = os.getcwd()
os.chdir(self._base_dir)
with open(manifest_filename, 'w') as xml_file:
- ElementTree.ElementTree(pkg_xml).write(xml_file, 'utf-8')
+ ElementTree.ElementTree(pkg_xml).write(xml_file, 'utf-8', True)
self._logger.debug('Wrote package XML')
with tarfile.open(tarball_name, 'w:gz') as tarball:
for filename in self._file_list:
|
fix missing xml declaration in package manifest
|
py
|
diff --git a/salt/utils/event.py b/salt/utils/event.py
index <HASH>..<HASH> 100644
--- a/salt/utils/event.py
+++ b/salt/utils/event.py
@@ -479,12 +479,16 @@ class Reactor(multiprocessing.Process, salt.state.Compiler):
'''
react = {}
for fn_ in glob.glob(glob_ref):
- react.update(self.render_template(
+ try:
+ react.update(self.render_template(
fn_,
tag=tag,
data=data))
+ except:
+ log.error('Failed to render "{0}"'.format(fn_))
return react
+
def list_reactors(self, tag):
'''
Take in the tag from an event and return a list of the reactors to
|
Fix for #<I>: Reactor process exits with wrong sls
|
py
|
diff --git a/astropy_helpers/setup_helpers.py b/astropy_helpers/setup_helpers.py
index <HASH>..<HASH> 100644
--- a/astropy_helpers/setup_helpers.py
+++ b/astropy_helpers/setup_helpers.py
@@ -1109,7 +1109,8 @@ def iter_setup_packages(srcdir, packages):
for packagename in packages:
package_parts = packagename.split('.')
package_path = os.path.join(srcdir, *package_parts)
- setup_package = os.path.join(package_path, 'setup_package.py')
+ setup_package = os.path.relpath(
+ os.path.join(package_path, 'setup_package.py'))
if os.path.isfile(setup_package):
module = import_file(setup_package)
|
Fix a small issue that was preventing the skip_2to3 feature from working correctly
|
py
|
diff --git a/bigfile/tests/test_bigfile.py b/bigfile/tests/test_bigfile.py
index <HASH>..<HASH> 100644
--- a/bigfile/tests/test_bigfile.py
+++ b/bigfile/tests/test_bigfile.py
@@ -123,11 +123,13 @@ def test_grow(comm):
b.grow(size=100, Nfile=2)
b.write(100, data)
+ assert b.size == 200
with x.open(d.str) as b:
assert b.Nfile == 5
assert_equal(b[:100], data)
assert_equal(b[100:], data)
+ assert b.size == 200
shutil.rmtree(fname)
|
Add assertion on the size of grown files.
|
py
|
diff --git a/pyghmi/ipmi/oem/lenovo/imm.py b/pyghmi/ipmi/oem/lenovo/imm.py
index <HASH>..<HASH> 100644
--- a/pyghmi/ipmi/oem/lenovo/imm.py
+++ b/pyghmi/ipmi/oem/lenovo/imm.py
@@ -673,7 +673,9 @@ class XCCClient(IMMClient):
if len(rsp['items']) != 1:
raise Exception('Unexpected result: ' + repr(rsp))
firmtype = rsp['items'][0]['firmware_type']
- if firmtype not in ('UEFI', 'IMM'): # adapter firmware
+ if firmtype not in (
+ 'TDM', 'WINDOWS DRIV', 'LINUX DRIVER', 'UEFI', 'IMM'):
+ # adapter firmware
webid = rsp['items'][0]['webfile_build_id']
locations = webid[webid.find('[')+1:webid.find(']')]
locations = locations.split(':')
|
Whitelist more core drivers. LXPM and associated content also needs to be updated the core way. Change-Id: I0b5eb<I>ad1b<I>bac<I>e0c2ec<I>a<I>da8f<I>
|
py
|
diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py
index <HASH>..<HASH> 100644
--- a/salt/modules/cmdmod.py
+++ b/salt/modules/cmdmod.py
@@ -982,8 +982,8 @@ def run(cmd,
.. warning::
For versions 2018.3.3 and above on macosx while using runas,
- to pass special characters to the command you need to escape
- the characters on the shell.
+ on linux while using run, to pass special characters to the
+ command you need to escape the characters on the shell.
Example:
|
doc: escape special characters on linux for cmd.run
|
py
|
diff --git a/models/rasmachine/rasmachine.py b/models/rasmachine/rasmachine.py
index <HASH>..<HASH> 100644
--- a/models/rasmachine/rasmachine.py
+++ b/models/rasmachine/rasmachine.py
@@ -392,6 +392,7 @@ if __name__ == '__main__':
['biopax', 'bel'], policy='none')
no_db_stmts = ac.filter_belief(no_db_stmts, belief_threshold)
orig_stmts = db_stmts + no_db_stmts
+ orig_stmts = ac.filter_top_level(orig_stmts)
stats['orig_final'] = len(orig_stmts)
logger.info('%d final statements' % len(orig_stmts))
@@ -413,6 +414,7 @@ if __name__ == '__main__':
['biopax', 'bel'], policy='none')
no_db_stmts = ac.filter_belief(no_db_stmts, belief_threshold)
new_stmts = db_stmts + no_db_stmts
+ new_stmts = ac.filter_top_level(new_stmts)
stats['new_final'] = len(new_stmts)
logger.info('%d final statements' % len(new_stmts))
|
Filter top-level after belief cutoff in Ras Machine
|
py
|
diff --git a/Python/test/phate_test_tree.py b/Python/test/phate_test_tree.py
index <HASH>..<HASH> 100755
--- a/Python/test/phate_test_tree.py
+++ b/Python/test/phate_test_tree.py
@@ -27,7 +27,7 @@ def main(argv=None):
#run phate with nonmetric MDS
phate_operator.reset_mds(mds="nonmetric")
- Y_nmmds = Y_mmds#phate_operator.fit_transform(M)
+ Y_nmmds = phate_operator.fit_transform(M)
pca = phate.preprocessing.pca_reduce(M, n_components=2)
tsne = sklearn.manifold.TSNE().fit_transform(M)
|
Update phate_test_tree.py
|
py
|
diff --git a/safe_qgis/impact_statistics/function_options_dialog.py b/safe_qgis/impact_statistics/function_options_dialog.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/impact_statistics/function_options_dialog.py
+++ b/safe_qgis/impact_statistics/function_options_dialog.py
@@ -31,6 +31,7 @@ from PyQt4.QtGui import (
QLabel,
QCheckBox,
QFormLayout,
+ QScrollArea,
QWidget)
from collections import OrderedDict
@@ -119,9 +120,13 @@ class FunctionOptionsDialog(QtGui.QDialog, Ui_FunctionOptionsDialogBase):
"""
# create minimum needs tab
tab = QWidget()
- form_layout = QFormLayout(tab)
+ form_widget = QWidget()
+ form_layout = QFormLayout(form_widget)
form_layout.setLabelAlignment(Qt.AlignLeft)
- self.tabWidget.addTab(tab, self.tr('Minimum Needs'))
+ scroll_area = QtGui.QScrollArea(tab)
+ scroll_area.setWidget(form_widget)
+ scroll_area.setWidgetResizable(True)
+ self.tabWidget.addTab(scroll_area, self.tr('Minimum Needs'))
self.tabWidget.tabBar().setVisible(True)
widget = QWidget()
|
make minimum needs scrollable in the minimum needs tab
|
py
|
diff --git a/ubireader/ubi_io/__init__.py b/ubireader/ubi_io/__init__.py
index <HASH>..<HASH> 100755
--- a/ubireader/ubi_io/__init__.py
+++ b/ubireader/ubi_io/__init__.py
@@ -192,7 +192,7 @@ class leb_virtual_file():
try:
self._last_read_addr = self._ubi.blocks[self._blocks[leb]].file_offset + self._ubi.blocks[self._blocks[leb]].ec_hdr.data_offset + offset
- except KeyError as e:
+ except Exception as e:
error(self.read, 'Error', 'LEB: %s is corrupted or has no data.' % (leb))
raise Exception('Bad Read Offset Request')
|
Changed leb_virtual_file read exception to general exception.
|
py
|
diff --git a/src/ai/backend/client/request.py b/src/ai/backend/client/request.py
index <HASH>..<HASH> 100644
--- a/src/ai/backend/client/request.py
+++ b/src/ai/backend/client/request.py
@@ -239,7 +239,7 @@ class Request:
'Disallowed HTTP method: {}'.format(self.method)
self.date = datetime.now(tzutc())
self.headers['Date'] = self.date.isoformat()
- if self.content_type is not None:
+ if self.content_type is not None and not self.headers['Content-Type']:
self.headers['Content-Type'] = self.content_type
full_url = self._build_url()
self._sign(full_url.relative())
|
Do not update Content-Type when it is pre-set
|
py
|
diff --git a/blog/management/commands/wordpress_to_wagtail.py b/blog/management/commands/wordpress_to_wagtail.py
index <HASH>..<HASH> 100755
--- a/blog/management/commands/wordpress_to_wagtail.py
+++ b/blog/management/commands/wordpress_to_wagtail.py
@@ -160,6 +160,8 @@ class Command(BaseCommand):
path, file_ = os.path.split(img['src'])
if not img['src']:
continue # Blank image
+ if img['src'].startswith('data:'):
+ continue # Embedded image
try:
remote_image = urllib.request.urlretrieve(img['src'])
except (urllib.error.HTTPError,
|
Don't import embedded base<I> data images.
|
py
|
diff --git a/dplython/dplython.py b/dplython/dplython.py
index <HASH>..<HASH> 100644
--- a/dplython/dplython.py
+++ b/dplython/dplython.py
@@ -247,9 +247,8 @@ def CreateLaterFunction(fcn, *args, **kwargs):
def DelayFunction(fcn):
def DelayedFcnCall(*args, **kwargs):
# Check to see if any args or kw are Later. If not, return normal fcn.
- checkIfLater = lambda x: type(x) == Later
if (len([a for a in args if isinstance(a, Later)]) == 0 and
- len([v for k, v in kwargs.items() if isinstance(a, Later)]) == 0):
+ len([v for k, v in kwargs.items() if isinstance(v, Later)]) == 0):
return fcn(*args, **kwargs)
else:
return CreateLaterFunction(fcn, *args, **kwargs)
|
fixing bug in DelayFunction
|
py
|
diff --git a/osmnx/core.py b/osmnx/core.py
index <HASH>..<HASH> 100644
--- a/osmnx/core.py
+++ b/osmnx/core.py
@@ -982,6 +982,7 @@ def truncate_graph_bbox(G, north, south, east, west, truncate_by_edge=False, ret
y = G.nodes[neighbor]['y']
if y < north and y > south and x < east and x > west:
any_neighbors_in_bbox = True
+ break
# if none of its neighbors are within the bounding box, add node
# to list of nodes outside the bounding box
|
(Very) minor optimization Stops it from searching through the rest of the neighbors after finding at least one neighbor lying inside the BBOX. The possible speedup might be unnoticeable in most cases with sparse networks.
|
py
|
diff --git a/modin/pandas/test/test_groupby.py b/modin/pandas/test/test_groupby.py
index <HASH>..<HASH> 100644
--- a/modin/pandas/test/test_groupby.py
+++ b/modin/pandas/test/test_groupby.py
@@ -114,6 +114,8 @@ def test_mixed_dtypes_groupby(as_index):
# TODO Add more apply functions
apply_functions = [lambda df: df.sum(), min]
+ # Workaround for Pandas bug #34656. Recreate groupby object for Pandas
+ pandas_groupby = pandas_df.groupby(by=by[-1], as_index=as_index)
for func in apply_functions:
eval_apply(modin_groupby, pandas_groupby, func)
|
Workaround for Pandas bug #<I>: recreate groupby object before apply because other functions affected its internals.
|
py
|
diff --git a/salt/utils/__init__.py b/salt/utils/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/utils/__init__.py
+++ b/salt/utils/__init__.py
@@ -703,7 +703,7 @@ def backup_minion(path, bkroot):
dname, bname = os.path.split(path)
fstat = os.stat(path)
msecs = str(int(time.time() * 1000000))[-6:]
- stamp = time.asctime().replace(' ', '_')
+ stamp = time.strftime("%a_%b_%d_%m_%H:%M:%S_%Y")
stamp = '{0}{1}_{2}'.format(stamp[:-4], msecs, stamp[-4:])
bkpath = os.path.join(bkroot,
dname[1:],
|
using strftime instead of asctime in backup_minion
|
py
|
diff --git a/deis/__init__.py b/deis/__init__.py
index <HASH>..<HASH> 100644
--- a/deis/__init__.py
+++ b/deis/__init__.py
@@ -6,4 +6,4 @@ the api, provider, cm, and web Django apps.
from __future__ import absolute_import
-__version__ = '0.15.0+git'
+__version__ = '0.15.1'
|
chore(release): update version to <I>
|
py
|
diff --git a/panels/_version.py b/panels/_version.py
index <HASH>..<HASH> 100644
--- a/panels/_version.py
+++ b/panels/_version.py
@@ -1,2 +1,2 @@
# Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440
-__version__ = "0.0.11"
+__version__ = "0.0.12"
|
Update version number to <I>
|
py
|
diff --git a/gnsq/reader.py b/gnsq/reader.py
index <HASH>..<HASH> 100644
--- a/gnsq/reader.py
+++ b/gnsq/reader.py
@@ -487,7 +487,7 @@ class Reader(object):
# first set RDY 0 to all connections that have not received a message
# within a configurable timeframe (low_ready_idle_timeout).
- for conn in self.conns:
+ for conn in list(self.conns):
if conn.ready_count == 0:
continue
|
Handle changing connections during redistribute ready
|
py
|
diff --git a/emencia_paste_djangocms_3/__init__.py b/emencia_paste_djangocms_3/__init__.py
index <HASH>..<HASH> 100644
--- a/emencia_paste_djangocms_3/__init__.py
+++ b/emencia_paste_djangocms_3/__init__.py
@@ -1,2 +1,2 @@
"""Epaster template to build DjangoCMS 3.x projects"""
-__version__ = '1.3.7'
+__version__ = '1.3.8'
|
Forgot to bump package version to <I>
|
py
|
diff --git a/eemeter/modeling/split.py b/eemeter/modeling/split.py
index <HASH>..<HASH> 100644
--- a/eemeter/modeling/split.py
+++ b/eemeter/modeling/split.py
@@ -229,7 +229,7 @@ class SplitModeledEnergyTrace(object):
# require NaN last data point as cap
if filtered_df.shape[0] > 0:
last_index = filtered_df.index[-1]
- filtered_df.set_value(last_index, 'value', np.nan)
- filtered_df.set_value(last_index, 'estimated', False)
+ filtered_df.at[last_index, 'value'] = np.nan
+ filtered_df.at[last_index, 'estimated'] = False
return filtered_df
|
Get rid of FutureWarning by modifying two lines from set_value() to at[] syntax.
|
py
|
diff --git a/tests/dataset_fixtures.py b/tests/dataset_fixtures.py
index <HASH>..<HASH> 100644
--- a/tests/dataset_fixtures.py
+++ b/tests/dataset_fixtures.py
@@ -32,6 +32,14 @@ def no_redis(monkeypatch):
monkeypatch.setattr('datalad_service.common.redis.redisClient', fake)
[email protected](autouse=True)
+def no_git_config(monkeypatch):
+ monkeypatch.setattr(
+ 'datalad_service.common.annex.CommitInfo.__enter__', lambda s: None)
+ monkeypatch.setattr(
+ 'datalad_service.common.annex.CommitInfo.__exit__', lambda s, x, y, z: None)
+
+
@pytest.fixture(scope='session')
def annex_path(tmpdir_factory):
path = tmpdir_factory.mktemp('annexes')
|
Disallow global git configuration edits in tests.
|
py
|
diff --git a/sqlparse/keywords.py b/sqlparse/keywords.py
index <HASH>..<HASH> 100644
--- a/sqlparse/keywords.py
+++ b/sqlparse/keywords.py
@@ -46,7 +46,11 @@ SQL_REGEX = {
(r'(CASE|IN|VALUES|USING)\b', tokens.Keyword),
(r'(@|##|#)[A-Z]\w+', tokens.Name),
- (r'[A-Z]\w*(?=\.)', tokens.Name), # see issue39
+
+ # see issue #39
+ # Spaces around period `schema . name` are valid identifier
+ # TODO: Spaces before period not implemented
+ (r'[A-Z]\w*(?=\s*\.)', tokens.Name), # 'Name' .
(r'(?<=\.)[A-Z]\w*', tokens.Name), # .'Name'
(r'[A-Z]\w*(?=\()', tokens.Name), # side effect: change kw to func
|
Rewrite regex to allow spaces between `name` and `.`
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -29,7 +29,7 @@ setup(
setup_requires=['setuptools-markdown'],
extras_require={
'tester': [
- "eth-tester[py-evm]==0.1.0b16",
+ "eth-tester[py-evm]==0.1.0b19",
],
'testrpc': ["eth-testrpc>=1.3.3,<2.0.0"],
'linter': [
|
bump to latest eth-tester
|
py
|
diff --git a/src/pyscipopt/__init__.py b/src/pyscipopt/__init__.py
index <HASH>..<HASH> 100644
--- a/src/pyscipopt/__init__.py
+++ b/src/pyscipopt/__init__.py
@@ -1,4 +1,4 @@
-__version__ = '3.2.0'
+__version__ = '3.2.1Test'
# required for Python 3.8 on Windows
import os
|
change version to <I>Test, testing the release workflow live
|
py
|
diff --git a/cerberus/__init__.py b/cerberus/__init__.py
index <HASH>..<HASH> 100644
--- a/cerberus/__init__.py
+++ b/cerberus/__init__.py
@@ -8,7 +8,7 @@
"""
-__version__ = "0.1.0"
+__version__ = "0.2.0"
from .cerberus import Validator, ValidationError, SchemaError
|
Bumped version to <I>
|
py
|
diff --git a/mongoctl/objects/server.py b/mongoctl/objects/server.py
index <HASH>..<HASH> 100644
--- a/mongoctl/objects/server.py
+++ b/mongoctl/objects/server.py
@@ -470,7 +470,11 @@ class Server(DocumentWrapper):
###########################################################################
def db_command(self, cmd, dbname):
# try without auth first if server allows it (i.e. version >= 3.0.0)
- db = self.get_db(dbname, no_auth=self.try_on_auth_failures())
+ if self.try_on_auth_failures():
+ need_auth = False
+ else:
+ need_auth = self.command_needs_auth(dbname, cmd)
+ db = self.get_db(dbname, no_auth=not need_auth)
try:
return db.command(cmd)
except (RuntimeError,Exception), e:
|
fix authing when not needed for pre <I>
|
py
|
diff --git a/furious/async.py b/furious/async.py
index <HASH>..<HASH> 100644
--- a/furious/async.py
+++ b/furious/async.py
@@ -155,6 +155,17 @@ class Async(object):
self._executing = False
self._executed = True
+ if self._options.get('persist_result'):
+ self._persist_result()
+
+ def _persist_result(self):
+ """Store this Async's result in persistent storage."""
+
+ self._prepare_persistence_engine()
+
+ return self._persistence_engine.store_async_result(
+ self.id, self.result)
+
@property
def _function_path(self):
return self.job[0]
@@ -363,13 +374,6 @@ class Async(object):
import uuid
return uuid.uuid4().hex
- def persist_result(self):
- """Store this Async's result in persistent storage."""
- self._prepare_persistence_engine()
-
- return self._persistence_engine.store_async_result(
- self.id, self.result)
-
def _increment_recursion_level(self):
"""Increment current_depth based on either defaults or the enclosing
Async.
|
In persist mode, call _persist_result when result is set. When `persist_result` is set in an Async's options, Async._persist_result will be called when the result is set on the Async. The persistence engine must be specified or an exception will be raised.
|
py
|
diff --git a/ecabc/abc.py b/ecabc/abc.py
index <HASH>..<HASH> 100644
--- a/ecabc/abc.py
+++ b/ecabc/abc.py
@@ -143,8 +143,7 @@ class ABC:
self.onlooker.bestEmployers.clear()
self.output.print("Assigning new positions")
for i in range(len(self.onlooker.bestEmployers)):
- sys.stdout.flush()
- sys.stdout.write('At bee number: %d \r' % (i+1))
+ self.output.print('At bee number: %d \r' % (i+1))
self.assignNewPositions(i)
self.output.print("Getting fitness average")
self.getFitnessAverage()
|
Fixed the missing replacement of sys.stdout
|
py
|
diff --git a/fluent_blogs/admin/entryadmin.py b/fluent_blogs/admin/entryadmin.py
index <HASH>..<HASH> 100644
--- a/fluent_blogs/admin/entryadmin.py
+++ b/fluent_blogs/admin/entryadmin.py
@@ -34,6 +34,7 @@ class EntryAdmin(SeoEntryAdminMixin, _entry_admin_base):
SeoEntryAdminMixin.FIELDSET_SEO,
)
+ list_filter = list(_entry_admin_base.list_filter)
formfield_overrides = {}
formfield_overrides.update(SeoEntryAdminMixin.formfield_overrides)
formfield_overrides.update({
@@ -47,3 +48,9 @@ class EntryAdmin(SeoEntryAdminMixin, _entry_admin_base):
for _f in ('intro', 'contents', 'categories', 'tags', 'enable_comments'):
if _f in _model_fields:
EntryAdmin.FIELDSET_GENERAL[1]['fields'] += (_f,)
+
+if 'categories' in _model_fields:
+ EntryAdmin.list_filter.append('categories')
+# This should only display tags that are in use, sorted by count:
+#if 'tags' in _model_fields:
+# EntryAdmin.list_filter.append('tags')
|
admin: show categories in the list_filter
|
py
|
diff --git a/dwave/cloud/__init__.py b/dwave/cloud/__init__.py
index <HASH>..<HASH> 100644
--- a/dwave/cloud/__init__.py
+++ b/dwave/cloud/__init__.py
@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+from dwave.cloud.client import Client
+from dwave.cloud.solver import Solver
+from dwave.cloud.computation import Future
|
General client available in cloud ns. This now works: >>> from dwave.cloud import Client >>> Client.from_config(profile='internal') <dwave.cloud.sw.Client object at 0xbeefcafe>
|
py
|
diff --git a/qtpylib/broker.py b/qtpylib/broker.py
index <HASH>..<HASH> 100644
--- a/qtpylib/broker.py
+++ b/qtpylib/broker.py
@@ -166,6 +166,20 @@ class Broker():
# do stuff on exit
atexit.register(self._on_exit)
+
+ # ---------------------------------------
+ def add_instruments(self, *instruments):
+ """ add instruments after initialization """
+ for instrument in instruments:
+ if isinstance(instrument, ezibpy.utils.Contract):
+ instrument = self.ibConn.contract_to_tuple(instrument)
+ contractString = self.ibConn.contractString(instrument)
+ self.instruments[contractString] = instrument
+ self.ibConn.createContract(instrument)
+
+ self.symbols = list(self.instruments.keys())
+
+
# ---------------------------------------
"""
instrument group methods
|
option to add instruments after algo initialization
|
py
|
diff --git a/tests/python/pants_test/rules/test_test_integration.py b/tests/python/pants_test/rules/test_test_integration.py
index <HASH>..<HASH> 100644
--- a/tests/python/pants_test/rules/test_test_integration.py
+++ b/tests/python/pants_test/rules/test_test_integration.py
@@ -4,6 +4,8 @@
from __future__ import absolute_import, division, print_function, unicode_literals
+import unittest
+
from pants_test.pants_run_integration_test import PantsRunIntegrationTest
@@ -90,6 +92,7 @@ testprojects/tests/python/pants/dummies:failing_target
""",
)
+ @unittest.skip('Flaky test: https://github.com/pantsbuild/pants/issues/6782')
def test_mixed_python_tests(self):
args = [
'--no-v1',
|
Skip another flaky test. (#<I>)
|
py
|
diff --git a/compiler/quilt/tools/main.py b/compiler/quilt/tools/main.py
index <HASH>..<HASH> 100644
--- a/compiler/quilt/tools/main.py
+++ b/compiler/quilt/tools/main.py
@@ -6,12 +6,26 @@ from __future__ import print_function
import argparse
import sys
+import pkg_resources
+
import requests
from . import command
from .const import DEFAULT_QUILT_YML
HANDLE = "owner/packge_name"
+VERSION = command.VERSION
+
+
+
+def get_full_version():
+ try:
+ quilt = pkg_resources.get_distribution('quilt')
+ except pkg_resources.DistributionNotFound:
+ pass
+ else:
+ return "quilt {} ({})".format(VERSION, quilt.egg_name())
+ return "quilt " + VERSION
def main():
"""
@@ -26,6 +40,8 @@ def main():
return (hashstr if 6 <= len(hashstr) <= 64 else
group.error('hashes must be 6-64 chars long'))
+ parser.add_argument('--version', action='version', version=get_full_version())
+
config_p = subparsers.add_parser("config")
config_p.set_defaults(func=command.config)
|
--version command (#<I>) Adds a --version option to quilt that prints the release version from the command line. When quilt is installed, it produces quilt <version> (<egg-name>), as in: quilt <I> (quilt-<I>-py<I>) When quilt is not installed, it produces quilt <version> only, as in: quilt <I>
|
py
|
diff --git a/python/google/transit/gtfs_realtime_pb2.py b/python/google/transit/gtfs_realtime_pb2.py
index <HASH>..<HASH> 100644
--- a/python/google/transit/gtfs_realtime_pb2.py
+++ b/python/google/transit/gtfs_realtime_pb2.py
@@ -1,3 +1,19 @@
+#! /usr/bin/python
+#
+# Copyright 2016 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: gtfs-realtime.proto
|
Re-add Google licensing header in preparation for submitting pull request
|
py
|
diff --git a/script/create-dist.py b/script/create-dist.py
index <HASH>..<HASH> 100755
--- a/script/create-dist.py
+++ b/script/create-dist.py
@@ -9,7 +9,7 @@ import tarfile
from lib.config import LIBCHROMIUMCONTENT_COMMIT, BASE_URL, NODE_VERSION
from lib.util import scoped_cwd, rm_rf, get_atom_shell_version, make_zip, \
- safe_mkdir, execute
+ safe_mkdir, safe_unlink, execute
ATOM_SHELL_VRESION = get_atom_shell_version()
@@ -81,6 +81,8 @@ def main():
args = parse_args()
+ if TARGET_PLATFORM == 'linux':
+ clean_build()
force_build()
if TARGET_PLATFORM != 'linux':
download_libchromiumcontent_symbols(args.url)
@@ -105,6 +107,16 @@ def parse_args():
return parser.parse_args()
+def clean_build():
+ # On Linux stripping binary would cause them to be rebuilt next time, which
+ # would make create-dist create symbols from stripped binary if it has been
+ # ran for twice.
+ # So in order to make sure we built correct symbols everytime, we have to
+ # force a rebuild of the binaries.
+ for binary in TARGET_BINARIES[TARGET_PLATFORM]:
+ safe_unlink(os.path.join(OUT_DIR, binary))
+
+
def force_build():
build = os.path.join(SOURCE_ROOT, 'script', 'build.py')
execute([sys.executable, build, '-c', 'Release'])
|
Make sure symbol is dumped from unstripped binary.
|
py
|
diff --git a/gocd_cli/commands/pipeline/__init__.py b/gocd_cli/commands/pipeline/__init__.py
index <HASH>..<HASH> 100644
--- a/gocd_cli/commands/pipeline/__init__.py
+++ b/gocd_cli/commands/pipeline/__init__.py
@@ -2,7 +2,7 @@ from gocd_cli.command import BaseCommand
from .retrigger_failed import RetriggerFailed
-__all__ = ['Pause', 'RetriggerFailed', 'Trigger', 'Unlock']
+__all__ = ['Pause', 'RetriggerFailed', 'Trigger', 'Unlock', 'Unpause']
def unlock_pipeline(pipeline):
|
Add Unpause to the list of available pipeline commands
|
py
|
diff --git a/sen/__init__.py b/sen/__init__.py
index <HASH>..<HASH> 100644
--- a/sen/__init__.py
+++ b/sen/__init__.py
@@ -11,10 +11,11 @@ def set_logging(name="sen", level=logging.DEBUG):
if level == logging.DEBUG:
handler = logging.FileHandler("debug.log")
- else:
- handler = logging.StreamHandler(sys.stderr)
+ handler.setLevel(logging.DEBUG)
+ # else:
+ # handler = logging.StreamHandler(sys.stderr)
- handler.setLevel(logging.DEBUG)
- formatter = logging.Formatter('%(asctime)s %(name)-18s %(levelname)-6s %(message)s', '%H:%M:%S')
- handler.setFormatter(formatter)
- logger.addHandler(handler)
+ formatter = logging.Formatter('%(asctime)s %(name)-18s %(levelname)-6s %(message)s',
+ '%H:%M:%S')
+ handler.setFormatter(formatter)
+ logger.addHandler(handler)
|
don't log to stderr. Fixes #<I>
|
py
|
diff --git a/tst.py b/tst.py
index <HASH>..<HASH> 100755
--- a/tst.py
+++ b/tst.py
@@ -4,7 +4,7 @@ from dataclasses import (
import inspect
import unittest
-from typing import ClassVar
+from typing import ClassVar, Any
from collections import OrderedDict
# Just any custom exception we can catch.
@@ -171,7 +171,7 @@ class TestCase(unittest.TestCase):
# the same as defined in Base.
@dataclass
class Base:
- x: float = 15.0
+ x: Any = 15.0
y: int = 0
@dataclass
@@ -179,6 +179,9 @@ class TestCase(unittest.TestCase):
z: int = 10
x: int = 15
+ o = Base()
+ self.assertEqual(repr(o), 'Base(x=15.0,y=0)')
+
o = C1()
self.assertEqual(repr(o), 'C1(x=15,y=0,z=10)')
|
Closes #<I>: Covariant overriding of fields considered unsafe.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -8,6 +8,6 @@ setup(
author='Niklas Rosenstein',
author_email='[email protected]',
url='https://github.com/NiklasRosenstein/myo-python',
- packages=['myo'],
+ packages=['myo', 'myo.lowlevel', 'myo.utils'],
install_requires=['six'],
)
|
fixed setup.py not taking subpackages into account
|
py
|
diff --git a/ontquery/query.py b/ontquery/query.py
index <HASH>..<HASH> 100644
--- a/ontquery/query.py
+++ b/ontquery/query.py
@@ -24,8 +24,15 @@ class OntQuery:
self._services = tuple(_services)
def add(self, *services):
+ """ add low priority services """
+ # FIXME dupes
self._services += services
+ def ladd(self, *services):
+ """ add high priority services """
+ # FIXME dupes
+ self._services = services + self._services
+
@property
def predicates(self):
unique_predicates = set()
@@ -41,6 +48,12 @@ class OntQuery:
def services(self):
return self._services
+ # see if we can get away with using ladd
+ #@services.setter
+ #def services(self, value):
+ #""" sometimes we need to reorder services """
+ #self._services = value
+
def __iter__(self): # make it easier to init filtered queries
yield from self.services
|
implement OntQuery.ladd for adding new highest priority services
|
py
|
diff --git a/charmhelpers/contrib/openstack/vaultlocker.py b/charmhelpers/contrib/openstack/vaultlocker.py
index <HASH>..<HASH> 100644
--- a/charmhelpers/contrib/openstack/vaultlocker.py
+++ b/charmhelpers/contrib/openstack/vaultlocker.py
@@ -140,9 +140,16 @@ def vault_relation_complete(backend=None):
:ptype backend: string
:returns: whether the relation to vault is complete
:rtype: bool"""
- vault_kv = VaultKVContext(secret_backend=backend or VAULTLOCKER_BACKEND)
- vault_kv()
- return vault_kv.complete
+ try:
+ import hvac
+ except ImportError:
+ return False
+ try:
+ vault_kv = VaultKVContext(secret_backend=backend or VAULTLOCKER_BACKEND)
+ vault_kv()
+ return vault_kv.complete
+ except hvac.exceptions.InvalidRequest:
+ return False
# TODO: contrib a high level unwrap method to hvac that works
|
Be more defensive when checking Vault (#<I>) Closes-Bug: <I>
|
py
|
diff --git a/lems/__init__.py b/lems/__init__.py
index <HASH>..<HASH> 100644
--- a/lems/__init__.py
+++ b/lems/__init__.py
@@ -8,4 +8,4 @@ import logging
logger = logging.getLogger('LEMS')
-__version__ = '0.3.7'
+__version__ = '0.4.1'
|
Merged master & incremented version
|
py
|
diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -41,6 +41,8 @@ def test_qt_api():
elif QT_API == 'pyqt5':
assert_pyqt5()
else:
+ # If the tests are run locally, USE_QT_API and QT_API may not be
+ # defined, but we still want to make sure qtpy is behaving sensibly.
# We should then be loading, in order of decreasing preference, PyQt5,
# PyQt4, and PySide.
try:
|
Added comment to explain why we consider the case where USE_QT_API and QT_API are not defined.
|
py
|
diff --git a/pyp2rpm/bin.py b/pyp2rpm/bin.py
index <HASH>..<HASH> 100644
--- a/pyp2rpm/bin.py
+++ b/pyp2rpm/bin.py
@@ -1,5 +1,6 @@
import getpass
import logging
+import os
from pyp2rpm.convertor import Convertor
from pyp2rpm import settings
@@ -110,7 +111,11 @@ def main(package, v, d, s, r, proxy, srpm, p, b, o, t, venv):
else:
# if user provide save_path then save spec in provided path
spec_path = d + '/' + spec_name
+ spec_dir = os.path.dirname(spec_path)
+ if not os.path.exists(spec_dir):
+ os.makedirs(spec_dir)
logger.debug('Opening specfile: {0}.'.format(spec_path))
+
if not utils.PY3:
converted = converted.encode('utf-8')
with open(spec_path, 'w') as f:
|
Create spec file directory if missing
|
py
|
diff --git a/curdling/uninstall.py b/curdling/uninstall.py
index <HASH>..<HASH> 100644
--- a/curdling/uninstall.py
+++ b/curdling/uninstall.py
@@ -1,6 +1,7 @@
from __future__ import absolute_import, print_function, unicode_literals
from distlib.util import parse_requirement
+from . import exceptions
from .database import Database
from .logging import Logger
from .util import safe_name
@@ -22,4 +23,8 @@ class Uninstall(object):
def run(self):
for package in self.packages:
self.logger.level(2, "Removing package %s", package)
- Database.uninstall(package)
+
+ try:
+ Database.uninstall(package)
+ except exceptions.PackageNotInstalled:
+ self.logger.level(1, "Package %s does not exist, skipping", package)
|
Warn the user if a package requested to be uninstalled does not exist
|
py
|
diff --git a/tests/test_sql_copy.py b/tests/test_sql_copy.py
index <HASH>..<HASH> 100644
--- a/tests/test_sql_copy.py
+++ b/tests/test_sql_copy.py
@@ -81,7 +81,7 @@ def in_memory_csv(request):
request.addfinalizer(fin)
- for i in xrange(IN_MEMORY_CSV_NROWS):
+ for i in range(IN_MEMORY_CSV_NROWS):
row = 'SRID=4326;POINT({lon} {lat}),{name},{age}\n'.format(
lon = random.uniform(-170.0, 170.0),
lat = random.uniform(-80.0, 80.0),
|
Replace xrange by range in tests
|
py
|
diff --git a/djangular/forms/angular_validation.py b/djangular/forms/angular_validation.py
index <HASH>..<HASH> 100644
--- a/djangular/forms/angular_validation.py
+++ b/djangular/forms/angular_validation.py
@@ -37,7 +37,11 @@ class NgValidationBoundField(forms.BoundField):
"""
Overload method which inserts AngularJS form validation elements just after the <label> tag.
"""
- lt = super(NgValidationBoundField, self).label_tag(contents, attrs, label_suffix)
+ from django import VERSION
+ if VERSION[1] <= 5:
+ lt = super(NgValidationBoundField, self).label_tag(contents, attrs)
+ else:
+ lt = super(NgValidationBoundField, self).label_tag(contents, attrs, label_suffix)
return lt + self.ng_validation_tags()
|
Fixed Django-<I> compatibility
|
py
|
diff --git a/pandas/io/data.py b/pandas/io/data.py
index <HASH>..<HASH> 100644
--- a/pandas/io/data.py
+++ b/pandas/io/data.py
@@ -69,8 +69,6 @@ def get_data_yahoo(name=None, start=None, end=None):
Returns a DataFrame.
"""
- from dateutil.relativedelta import relativedelta
-
start, end = _sanitize_dates(start, end)
if(name is None):
@@ -79,13 +77,11 @@ def get_data_yahoo(name=None, start=None, end=None):
yahoo_URL = 'http://ichart.yahoo.com/table.csv?'
- start -= relativedelta(months=1)
-
url = yahoo_URL + 's=%s' % name + \
- '&a=%s' % start.month + \
+ '&a=%s' % (start.month - 1) + \
'&b=%s' % start.day + \
'&c=%s' % start.year + \
- '&d=%s' % end.month + \
+ '&d=%s' % (end.month - 1) + \
'&e=%s' % end.day + \
'&f=%s' % end.year + \
'&g=d' + \
|
Fixed DataReader for yahoo to match the API. The months are expected to be zero-indexed. relativedelta was used to roll back the month, but did it incorrectly. This is a fix for issue <I>. This bug was fixed in class as a part of the RIT FLOSS game development seminar. <URL>
|
py
|
diff --git a/subliminal/providers/shooter.py b/subliminal/providers/shooter.py
index <HASH>..<HASH> 100644
--- a/subliminal/providers/shooter.py
+++ b/subliminal/providers/shooter.py
@@ -9,7 +9,6 @@ from requests import Session
from . import Provider
from .. import __short_version__
from ..subtitle import Subtitle, fix_line_ending
-from ..video import Episode, Movie
logger = logging.getLogger(__name__)
|
Remove unused imports in shooter
|
py
|
diff --git a/fudge/tests/test_fudge.py b/fudge/tests/test_fudge.py
index <HASH>..<HASH> 100644
--- a/fudge/tests/test_fudge.py
+++ b/fudge/tests/test_fudge.py
@@ -613,6 +613,17 @@ class TestOrderedCalls(unittest.TestCase):
fudge.verify()
@raises(AssertionError)
+ def test_chained_fakes_honor_order(self):
+ Thing = Fake("thing").remember_order().expects("__init__")
+ holder = Thing.expects("get_holder").returns_fake()
+ holder = holder.expects("init")
+
+ thing = Thing()
+ holder = thing.get_holder()
+ # missing thing.init()
+ fudge.verify()
+
+ @raises(AssertionError)
def test_too_many_calls(self):
db = Fake("db")\
.remember_order()\
|
Added additional test for ordered expectations when using returns_fake()
|
py
|
diff --git a/test/integration-test.py b/test/integration-test.py
index <HASH>..<HASH> 100644
--- a/test/integration-test.py
+++ b/test/integration-test.py
@@ -169,7 +169,7 @@ def test_ct_submission():
url_a = "http://boulder:4500/submissions"
url_b = "http://boulder:4501/submissions"
submissions_a = urllib2.urlopen(url_a).read()
- submissions_b = urllib2.urlopen(url_a).read()
+ submissions_b = urllib2.urlopen(url_b).read()
expected_a_submissions = int(submissions_a)+1
expected_b_submissions = int(submissions_b)+1
auth_and_issue([random_domain()])
|
Properly initialize submissions_b count (#<I>) The `submissions_b` count in the integration test `test_ct_submission` function was being populated initially by using `url_a` when it _should_ be initialized using `url_b` since it's the count of submissions to log b. This resolves <URL> would fail. With this fix, all <I> passed.
|
py
|
diff --git a/singularity/hub/registry/utils/cache.py b/singularity/hub/registry/utils/cache.py
index <HASH>..<HASH> 100644
--- a/singularity/hub/registry/utils/cache.py
+++ b/singularity/hub/registry/utils/cache.py
@@ -23,6 +23,7 @@ SOFTWARE.
'''
from singularity.logger import bot
+from singularity.utils import mkdir_p
import tempfile
import os
import pwd
@@ -89,7 +90,7 @@ def get_cache(subfolder=None,quiet=False):
cache_base = "%s/%s" %(cache_base,subfolder)
# Create the cache folder(s), if don't exist
- create_folders(cache_base)
+ mkdir_p(cache_base)
if not quiet:
bot.debug("Cache folder set to %s" %cache_base)
|
modified: singularity/hub/registry/utils/cache.py
|
py
|
diff --git a/macros/plugin.py b/macros/plugin.py
index <HASH>..<HASH> 100644
--- a/macros/plugin.py
+++ b/macros/plugin.py
@@ -383,7 +383,7 @@ class MacrosPlugin(BasePlugin):
# self.variables['files'] = files
- def on_serve(self, server, config):
+ def on_serve(self, server, config, **kwargs):
"""
Called when the serve command is used during development.
This is to add files or directories to the list of "watched"
|
Add **kwargs to on_serve event handler. mkdocs <I> passes a `builder` kwarg to the `on_serve` event handler which results in a TypeError if not handled. This passes it through using **kwargs per the mkdocs best practice guidance [1]. [1] <URL>
|
py
|
diff --git a/datarum/converter.py b/datarum/converter.py
index <HASH>..<HASH> 100644
--- a/datarum/converter.py
+++ b/datarum/converter.py
@@ -30,19 +30,7 @@ def seconds_convert(total_days):
dat.mónþ += 1
if dat.mónþ == 13:
- bises = False
-
- # Romme Rule for Leaps
- if dat.gere in [3, 7, 11, 15]:
- bises = True
- if dat.gere >= 20 and dat.gere % 4 == 0:
- bises = True
- if dat.gere >= 100 and dat.gere % 100 == 0:
- bises = False
- if dat.gere >= 400 and dat.gere % 400 == 0:
- bises = True
-
- if bises:
+ if romme_bises(dat.gere):
if dat.dæg > 6:
dat.dæg = 1
dat.mónþ = 1
@@ -55,4 +43,17 @@ def seconds_convert(total_days):
day_count += 1
- return dat
\ No newline at end of file
+ return dat
+
+def romme_bises(gere):
+ bises = False
+ if gere in [3, 7, 11, 15]:
+ bises = True
+ if gere >= 20 and gere % 4 == 0:
+ bises = True
+ if gere >= 100 and gere % 100 == 0:
+ bises = False
+ if gere >= 400 and gere % 400 == 0:
+ bises = True
+
+ return bises
|
Move romme bises check into own func
|
py
|
diff --git a/tests/show_segmentation_test.py b/tests/show_segmentation_test.py
index <HASH>..<HASH> 100644
--- a/tests/show_segmentation_test.py
+++ b/tests/show_segmentation_test.py
@@ -42,6 +42,7 @@ class ShowSegmemtationCase(unittest.TestCase):
@attr('long')
+ # @attr('interactive')
def test_from_file(self):
input_file = "~/lisa_data/jatra_5mm_new.pklz"
output_file = "jatra.vtk"
@@ -69,7 +70,7 @@ class ShowSegmemtationCase(unittest.TestCase):
# resize_mm=self.resize_mm,
resize_voxel_number=90000,
# smoothing=self.smoothing,
- # show=False
+ show=False
)
# import sed3
#
|
try to run vtk test again
|
py
|
diff --git a/modopt/opt/algorithms.py b/modopt/opt/algorithms.py
index <HASH>..<HASH> 100644
--- a/modopt/opt/algorithms.py
+++ b/modopt/opt/algorithms.py
@@ -217,13 +217,22 @@ class FISTA(object):
This class is inhereited by optimisation classes to speed up convergence
The parameters for the modified FISTA are as described in [L2018] (p, q, r)
- or in .
+ or in [C2015] (a).
"""
def __init__(self, a=None, p=1, q=1, r=4):
+ if a is not None:
+ self.mode = 'CD'
+ else:
+ self.mode = 'regular'
+ self.a = a
+ self.p = p
+ self.q = q
+ self.r = r
self._t_now = 1.0
self._t_prev = 1.0
+ self._k = 0
def update_lambda(self, *args, **kwargs):
r"""Update lambda
|
added initialisation of params in FISTA class
|
py
|
diff --git a/raven/utils/__init__.py b/raven/utils/__init__.py
index <HASH>..<HASH> 100644
--- a/raven/utils/__init__.py
+++ b/raven/utils/__init__.py
@@ -57,6 +57,16 @@ _VERSION_CACHE = {}
def get_version_from_app(module_name, app):
version = None
+
+ # Try to pull version from pkg_resource first
+ # as it is able to detect version tagged with egg_info -b
+ if pkg_resources is not None:
+ # pull version from pkg_resources if distro exists
+ try:
+ return pkg_resources.get_distribution(module_name).version
+ except pkg_resources.DistributionNotFound:
+ pass
+
if hasattr(app, 'get_version'):
version = app.get_version
elif hasattr(app, '__version__'):
@@ -73,13 +83,7 @@ def get_version_from_app(module_name, app):
version = None
if version is None:
- if pkg_resources is None:
- return None
- # pull version from pkg_resources if distro exists
- try:
- version = pkg_resources.get_distribution(module_name).version
- except pkg_resources.DistributionNotFound:
- return None
+ return None
if isinstance(version, (list, tuple)):
version = '.'.join(map(str, version))
|
Try to detect version with pkg_resources first
|
py
|
diff --git a/src/hdx/data/dataset.py b/src/hdx/data/dataset.py
index <HASH>..<HASH> 100755
--- a/src/hdx/data/dataset.py
+++ b/src/hdx/data/dataset.py
@@ -457,18 +457,22 @@ class Dataset(HDXObject):
resource_ignore_fields.append(resource_ignore_field)
else:
dataset_ignore_fields.append(ignore_field)
- if "package_id" not in resource_ignore_fields:
- resource_ignore_fields.append("package_id")
if self.is_requestable():
- self._check_required_fields("dataset-requestable", dataset_ignore_fields)
+ self._check_required_fields(
+ "dataset-requestable", dataset_ignore_fields
+ )
else:
self._check_required_fields("dataset", dataset_ignore_fields)
if len(self.resources) == 0 and not allow_no_resources:
raise HDXError(
"There are no resources! Please add at least one resource!"
)
+ if "package_id" not in resource_ignore_fields:
+ resource_ignore_fields.append("package_id")
for resource in self.resources:
- resource.check_required_fields(ignore_fields=resource_ignore_fields)
+ resource.check_required_fields(
+ ignore_fields=resource_ignore_fields
+ )
@staticmethod
def revise(
|
Allow "resource:xxx" to pass xxx field to be ignored in create_in_hdx and update_in_hdx of Dataset class
|
py
|
diff --git a/pystache/renderengine.py b/pystache/renderengine.py
index <HASH>..<HASH> 100644
--- a/pystache/renderengine.py
+++ b/pystache/renderengine.py
@@ -17,8 +17,22 @@ DEFAULT_TAG_CLOSING = '}}'
END_OF_LINE_CHARACTERS = ['\r', '\n']
-# TODO: what are the possibilities for val?
def call(val, view, template=None):
+ """
+ Arguments:
+
+ val: the argument val can be any of the following:
+
+ * a unicode string
+ * the return value of a call to any of the following:
+
+ * RenderEngine.partial_tag_function()
+ * RenderEngine.section_tag_function()
+ * inverseTag()
+ * RenderEngine.literal_tag_function()
+ * RenderEngine.escape_tag_function()
+
+ """
if callable(val):
(args, _, _, _) = inspect.getargspec(val)
|
Added call() docstring.
|
py
|
diff --git a/resolwe/flow/managers/workload_connectors/kubernetes.py b/resolwe/flow/managers/workload_connectors/kubernetes.py
index <HASH>..<HASH> 100644
--- a/resolwe/flow/managers/workload_connectors/kubernetes.py
+++ b/resolwe/flow/managers/workload_connectors/kubernetes.py
@@ -716,6 +716,7 @@ class Connector(BaseConnector):
core_api = kubernetes.client.CoreV1Api()
ebs_claim_name = self._ebs_claim_name(data_id)
logger.debug("Kubernetes: removing claim %s.", ebs_claim_name)
- core_api.delete_namespaced_persistent_volume_claim(
- name=ebs_claim_name, namespace=self.kubernetes_namespace
- )
+ with suppress(kubernetes.client.rest.ApiException):
+ core_api.delete_namespaced_persistent_volume_claim(
+ name=ebs_claim_name, namespace=self.kubernetes_namespace
+ )
|
Suppress cleanup error in kubernetes when PVC can not be found
|
py
|
diff --git a/pandas/tools/rplot.py b/pandas/tools/rplot.py
index <HASH>..<HASH> 100644
--- a/pandas/tools/rplot.py
+++ b/pandas/tools/rplot.py
@@ -305,6 +305,21 @@ class GeomScatter(Layer):
ax.scatter(x, y, **self.kwds)
return fig, ax
+class GeomHistogram(Layer):
+ def __init__(self, bins=10, colour='grey'):
+ self.bins = bins
+ self.colour = colour
+ Layer.__init__(self)
+
+ def work(self, fig=None, ax=None):
+ if ax is None:
+ if fig is None:
+ return fig, ax
+ else:
+ ax = fig.gca()
+ x = self.data[self.aes['x']]
+ ax.hist(x, self.bins, facecolor=self.colour)
+ return fig, ax
class GeomDensity2D(Layer):
def work(self, fig=None, ax=None):
|
Added GeomHistogram class
|
py
|
diff --git a/eventkit/models.py b/eventkit/models.py
index <HASH>..<HASH> 100644
--- a/eventkit/models.py
+++ b/eventkit/models.py
@@ -401,3 +401,10 @@ class Event(AbstractEvent):
"""
tracker = FieldTracker(AbstractEvent.MONITOR_FIELDS)
+
+ def save(self, *args, **kwargs):
+ # Avoid `AttributeError: 'NoneType' object has no attribute 'tree_id'`
+ # by delaying MPTT updates. Not sure what is causing it.
+ with transaction.atomic():
+ with Event._tree_manager.delay_mptt_updates():
+ super(Event, self).save(*args, **kwargs)
|
Avoid AttributeError by delaying MPTT updates.
|
py
|
diff --git a/quantecon/random/utilities.py b/quantecon/random/utilities.py
index <HASH>..<HASH> 100644
--- a/quantecon/random/utilities.py
+++ b/quantecon/random/utilities.py
@@ -39,7 +39,6 @@ def probvec(m, k, random_state=None):
[ 0.43772774, 0.34763084, 0.21464142]])
"""
-
x = np.empty((m, k+1))
random_state = check_random_state(random_state)
@@ -91,7 +90,6 @@ def sample_without_replacement(n, k, num_trials=None, random_state=None):
[4, 1, 3]])
"""
-
if n <= 0:
raise ValueError('n must be greater than 0')
if k > n:
|
undo space changes - but still getting error in readthedocs build
|
py
|
diff --git a/looper/loodels.py b/looper/loodels.py
index <HASH>..<HASH> 100644
--- a/looper/loodels.py
+++ b/looper/loodels.py
@@ -41,6 +41,17 @@ class Project(models.Project):
@property
+ def compute_env_var(self):
+ """
+ Environment variable through which to access compute settings.
+
+ :return str: name of the environment variable to pointing to
+ compute settings
+ """
+ return "LOOPERENV"
+
+
+ @property
def required_metadata(self):
""" Which metadata attributes are required. """
return ["output_dir"]
|
recover LOOPERENV name for looper-specific Project
|
py
|
diff --git a/anyconfig/mergeabledict.py b/anyconfig/mergeabledict.py
index <HASH>..<HASH> 100644
--- a/anyconfig/mergeabledict.py
+++ b/anyconfig/mergeabledict.py
@@ -207,14 +207,14 @@ class UpdateWithReplaceDict(dict):
Traceback (most recent call last):
TypeError: ...
"""
- def _update(self, other, key, val=None, is_dict=True):
+ def _update(self, other, key, *args):
"""
:param other:
dict or dict-like object or a list of (key, value) pair tuples
:param key: object key
- :param val: object value
+ :param args: [] or (value, ...)
"""
- self[key] = other[key] if val is None and is_dict else val
+ self[key] = args[0] if args else other[key]
def update(self, *others, **another):
"""
@@ -231,7 +231,7 @@ class UpdateWithReplaceDict(dict):
self._update(other, key)
else:
for key, val in other: # TypeError, etc. may be raised.
- self._update(other, key, val=val, is_dict=False)
+ self._update(other, key, val)
for key in another.keys():
self._update(another, key)
|
fix: one more workaround try of UpdateWithReplaceDict.update for python <I> (again)
|
py
|
diff --git a/tests/settings.py b/tests/settings.py
index <HASH>..<HASH> 100644
--- a/tests/settings.py
+++ b/tests/settings.py
@@ -1,2 +1,3 @@
-INSTALLED_APPS = ('django_hudson',)
+INSTALLED_APPS = ('django.contrib.sessions', # just to enshure that dotted apps test works
+ 'django_hudson',)
DATABASE_ENGINE = 'sqlite3'
|
Add app with dots in names to test suite
|
py
|
diff --git a/cumulusci/tasks/metadata_etl/tests/test_base.py b/cumulusci/tasks/metadata_etl/tests/test_base.py
index <HASH>..<HASH> 100644
--- a/cumulusci/tasks/metadata_etl/tests/test_base.py
+++ b/cumulusci/tasks/metadata_etl/tests/test_base.py
@@ -533,8 +533,6 @@ class TestUpdateMetadataFirstChildTextTask:
metadata = fromstring(ORIGINAL_XML.encode("utf-8"))
- expected_metadata = fromstring(EXPECTED_XML.encode("utf-8"))
-
task = create_task(
UpdateMetadataFirstChildTextTask,
{
@@ -553,7 +551,7 @@ class TestUpdateMetadataFirstChildTextTask:
assert metadata == actual
- assert actual.tostring() == expected_metadata.tostring()
+ assert actual.tostring(xml_declaration=True) == EXPECTED_XML
task.logger.info.assert_has_calls(
[
|
simplify xml comparison in test
|
py
|
diff --git a/ayrton/__init__.py b/ayrton/__init__.py
index <HASH>..<HASH> 100644
--- a/ayrton/__init__.py
+++ b/ayrton/__init__.py
@@ -198,7 +198,7 @@ def polute (d):
'_k', '_p', '_r', '_s', '_u', '_w', '_x', '_L',
'_N', '_S', '_nt', '_ot' ],
'ayrton.expansion': [ 'bash', ],
- 'ayrton.functions': [ 'cd', 'export', 'option', 'run', 'shift', 'ssh',
+ 'ayrton.functions': [ 'cd', 'export', 'option', 'remote', 'run', 'shift',
'unset', ],
'ayrton': [ 'Capture', ],
'sh': [ 'CommandNotFound', ],
|
* last function rename was not complete. I should add a git hook to run the tests before actually committing anything.
|
py
|
diff --git a/django_mailbox/models.py b/django_mailbox/models.py
index <HASH>..<HASH> 100644
--- a/django_mailbox/models.py
+++ b/django_mailbox/models.py
@@ -320,6 +320,8 @@ class Mailbox(models.Model):
msg.from_header = convert_header_to_unicode(message['from'])
if 'to' in message:
msg.to_header = convert_header_to_unicode(message['to'])
+ elif 'Delivered-To' in message:
+ msg.to_header = convert_header_to_unicode(message['Delivered-To'])
msg.save()
message = self._get_dehydrated_message(message, msg)
msg.set_body(message.as_string())
|
Store 'Delivered-To' as 'to_header' when 'to' is unspecified.
|
py
|
diff --git a/salt/pillar/git_pillar.py b/salt/pillar/git_pillar.py
index <HASH>..<HASH> 100644
--- a/salt/pillar/git_pillar.py
+++ b/salt/pillar/git_pillar.py
@@ -316,6 +316,8 @@ class _LegacyGitPillar(object):
branch = opts.get('environment') or 'base'
if branch == 'base':
branch = opts.get('gitfs_base') or 'master'
+ elif ':' in branch:
+ branch = branch.split(':', 1)[0]
return branch
def update(self):
|
Get rid of error in legacy git pillar when using branch mapping notation. This doesn't fix the problem of the branch not being mapped to the specified branch, and this code is not going to be maintained going forward (in favor of the gitfs-backed git_pillar code), but this will at least prevent the branch from failing to checkout.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,8 @@ requirements = [
'imageio',
'numpy',
'matplotlib',
- 'trimesh[all]',
+ 'trimesh[easy]',
+ 'meshpy',
'autolab_core',
'autolab_perception',
'meshrender'
|
Update setup.py to depend on meshpy
|
py
|
diff --git a/usagestats.py b/usagestats.py
index <HASH>..<HASH> 100644
--- a/usagestats.py
+++ b/usagestats.py
@@ -236,7 +236,7 @@ class Stats(object):
# FIXME: ``data=generator()`` would make requests stream,
# which is currently not a good idea (WSGI chokes on it)
r = requests.post(self.drop_point, data=fp.read(),
- verify=self.ssl_verify)
+ timeout=1, verify=self.ssl_verify)
r.raise_for_status()
except Exception as e:
logger.warning("Couldn't upload %s: %s", old_filename, str(e))
@@ -250,7 +250,7 @@ class Stats(object):
# FIXME: ``data=generator()`` would make requests stream, which is
# currently not a good idea (WSGI chokes on it)
r = requests.post(self.drop_point, data=b''.join(generator()),
- verify=self.ssl_verify)
+ timeout=1, verify=self.ssl_verify)
r.raise_for_status()
except requests.RequestException as e:
logger.warning("Couldn't upload report: %s", str(e))
|
Adds 1s timeout to report submission
|
py
|
diff --git a/niworkflows/common/report.py b/niworkflows/common/report.py
index <HASH>..<HASH> 100644
--- a/niworkflows/common/report.py
+++ b/niworkflows/common/report.py
@@ -9,13 +9,6 @@ import jinja2
from pkg_resources import resource_filename as pkgrf
from abc import abstractmethod
-import nibabel as nb
-from nilearn import plotting
-from nipype import logging
-from nipype.interfaces import ants, fsl
-from nipype.interfaces.base import File, traits
-from nipype.utils import filemanip
-
class ReportCapableInterface(object):
''' temporary mixin to enable reports for nipype interfaces '''
|
remove unnecessary imports left over from my fmriprep changes
|
py
|
diff --git a/src/_pytest/_code/code.py b/src/_pytest/_code/code.py
index <HASH>..<HASH> 100644
--- a/src/_pytest/_code/code.py
+++ b/src/_pytest/_code/code.py
@@ -721,11 +721,11 @@ class FormattedExcinfo:
) -> List[str]:
"""Return formatted and marked up source lines."""
lines = []
- if source is None or line_index >= len(source.lines):
+ if source is not None and line_index < 0:
+ line_index += len(source.lines)
+ if source is None or line_index >= len(source.lines) or line_index < 0:
source = Source("???")
line_index = 0
- if line_index < 0:
- line_index += len(source)
space_prefix = " "
if short:
lines.append(space_prefix + source.lines[line_index].strip())
|
Make code.FormattedExcinfo.get_source more defensive. When line_index was a large negative number, get_source failed on `source.lines[line_index]`. Use the same dummy Source as with a large positive line_index.
|
py
|
diff --git a/tests/testing_utils.py b/tests/testing_utils.py
index <HASH>..<HASH> 100644
--- a/tests/testing_utils.py
+++ b/tests/testing_utils.py
@@ -286,13 +286,13 @@ def shutdown_environment(config=True, gui_config=True, caplog=None, expected_war
assert not rafcon.core.singleton.state_machine_manager.state_machines
if gui_ready:
assert not rafcon.gui.singleton.state_machine_manager_model.state_machines
+ except:
+ raise
+ finally:
rewind_and_set_libraries()
reload_config(config, gui_config)
GUI_INITIALIZED = GUI_SIGNAL_INITIALIZED = False
gui_thread = gui_ready = None
- except:
- raise
- finally:
test_multithreading_lock.release()
if unpatch_threading:
|
fix(testing_utils): reload libraries in any case in shutdown_environment function
|
py
|
diff --git a/intercom/controllers/keystrokes.py b/intercom/controllers/keystrokes.py
index <HASH>..<HASH> 100644
--- a/intercom/controllers/keystrokes.py
+++ b/intercom/controllers/keystrokes.py
@@ -58,6 +58,10 @@ def main(screen):
if cc in codes:
controller.do(*codes[cc])
+ elif cc == 'p':
+ controller.send('do:mpd.play', {})
+ elif cc == 'P':
+ controller.send('do:mpd.pause', {})
elif cc == 'S':
controller.send('do:pc.suspend', {'origin': controller.name})
elif cc == 'q':
|
Added play/pause for MPD minion
|
py
|
diff --git a/firebirdsql/fbcore.py b/firebirdsql/fbcore.py
index <HASH>..<HASH> 100755
--- a/firebirdsql/fbcore.py
+++ b/firebirdsql/fbcore.py
@@ -839,10 +839,10 @@ class BaseConnect:
def _op_service_attach(self):
dpb = bs([2,2])
s = self.str_to_bytes(self.user)
- dpb += bs([28, len(s)]) + s
+ dpb += bs([isc_spb_user_name, len(s)]) + s
s = self.str_to_bytes(self.password)
- dpb += bs([29, len(s)]) + s
- dpb += bs([0x3a,0x04,0x78,0x0a,0x00,0x00]) # isc_dpb_dummy_packet_interval
+ dpb += bs([isc_spb_password, len(s)]) + s
+ dpb += bs([isc_dpb_dummy_packet_interbal,0x04,0x78,0x0a,0x00,0x00])
p = xdrlib.Packer()
p.pack_int(self.op_service_attach)
p.pack_int(0)
|
use isc_xxx consts in _op_service_attach() packet
|
py
|
diff --git a/py3status/modules/imap.py b/py3status/modules/imap.py
index <HASH>..<HASH> 100644
--- a/py3status/modules/imap.py
+++ b/py3status/modules/imap.py
@@ -33,6 +33,7 @@ SAMPLE OUTPUT
import imaplib
from threading import Thread
import select
+from time import sleep
from ssl import create_default_context
from socket import error as socket_error
STRING_UNAVAILABLE = 'N/A'
@@ -88,7 +89,8 @@ class Py3status:
def check_mail(self):
# I -- acquire mail_count
if self.use_idle is not False:
- if not self.idle_thread.isAlive():
+ if not self.idle_thread.is_alive():
+ sleep(5) # rate-limit thread-restarting (when network is offline)
self.idle_thread = Thread(target=self._get_mail_count, daemon=True)
self.idle_thread.start()
response = {'cached_until': self.py3.CACHE_FOREVER}
|
throttle thread-restarting. If the thread repeatedly dies (most likely because no network connection is available), it gets restarted immediately -- over and over again. This commit adds a simple sleep(5) to mitigate that. Also s/isAlive/is_alive/ (old notation deprecated)
|
py
|
diff --git a/claripy/vsa/strided_interval.py b/claripy/vsa/strided_interval.py
index <HASH>..<HASH> 100644
--- a/claripy/vsa/strided_interval.py
+++ b/claripy/vsa/strided_interval.py
@@ -113,6 +113,8 @@ class StridedInterval(BackendObject):
self._lower_bound = self._lower_bound & (2 ** bits - 1)
self._upper_bound = self._upper_bound & (2 ** bits - 1)
+ self.normalize()
+
def copy(self):
si = StridedInterval(name=self._name,
bits=self.bits,
|
call normalize() after a StridedInterval is created
|
py
|
diff --git a/master/buildbot/status/web/builder.py b/master/buildbot/status/web/builder.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/status/web/builder.py
+++ b/master/buildbot/status/web/builder.py
@@ -143,16 +143,17 @@ class ForceBuildActionResource(ActionResource):
"forcescheduler arg not found"))
return
- args = req.args.copy()
-
+ args = {}
# decode all of the args
encoding = getRequestCharset(req)
- for name, argl in args.iteritems():
- args[name] = [ arg.decode(encoding) for arg in argl ]
-
- # damn html's ungeneric checkbox implementation...
- for cb in args.get("checkbox", []):
- args[cb] = True
+ for name, argl in req.args.iteritems():
+ name = name.decode(encoding)
+ if name == 'checkbox':
+ # damn html's ungeneric checkbox implementation...
+ for cb in argl:
+ args[cb] = True
+ else:
+ args[name] = [ arg.decode(encoding) for arg in argl ]
builder_name = self.builder_status.getName()
|
Also decode the field names when force-building. With ForceScheduler, it is possible to have semi-arbitrary names. This also cleans up the arg-preprocessing code somewhat. Refs #<I>.
|
py
|
diff --git a/openquake/risklib/riskmodels.py b/openquake/risklib/riskmodels.py
index <HASH>..<HASH> 100644
--- a/openquake/risklib/riskmodels.py
+++ b/openquake/risklib/riskmodels.py
@@ -725,7 +725,7 @@ class CompositeRiskModel(collections.abc.Mapping):
for key, lt in rdic:
rm = rdic[key, lt]
if len(rm.imt_by_lt) == 1:
- # TODO: if `check_risk_ids` will raise an error then
+ # NB: if `check_risk_ids` raise an error then
# this code branch will never run
[(lt, imt)] = rm.imt_by_lt.items()
else:
|
Improved comment [ci skip]
|
py
|
diff --git a/tests/test_hexary_trie.py b/tests/test_hexary_trie.py
index <HASH>..<HASH> 100644
--- a/tests/test_hexary_trie.py
+++ b/tests/test_hexary_trie.py
@@ -221,6 +221,22 @@ def test_hexary_trie_at_root_lookups():
assert key not in snapshot
+def test_hexary_trie_empty_squash_does_not_read_root():
+ db = {}
+ trie = HexaryTrie(db=db)
+ trie[b'AAA'] = b'LONG'*32
+ trie[b'BBB'] = b'LONG'*32
+ trie[b'\xffEE'] = b'LONG'*32
+
+ flagged_usage_db = KeyAccessLogger(db)
+ flag_trie = HexaryTrie(flagged_usage_db, root_hash=trie.root_hash)
+ with flag_trie.squash_changes():
+ # root node should not be read if no changes are made during squash
+ pass
+
+ assert len(flagged_usage_db.read_keys) == 0
+
+
@pytest.mark.parametrize(
'name, updates, expected, deleted, final_root',
FIXTURES_PERMUTED,
|
Test that no-op squash_changes reads no nodes
|
py
|
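The trie test above leans on a KeyAccessLogger wrapper (defined elsewhere in that test suite) so it can assert that the squash context reads nothing from the backing store. A generic sketch of such a read-logging mapping, not the project's actual helper:

class ReadLoggingDB:
    """Dict-like wrapper that records every key read from the backing store."""

    def __init__(self, backing):
        self.backing = backing
        self.read_keys = set()

    def __getitem__(self, key):
        self.read_keys.add(key)
        return self.backing[key]

    def __setitem__(self, key, value):
        self.backing[key] = value

    def __contains__(self, key):
        return key in self.backing

A test can then wrap its store, run the code under scrutiny, and assert len(db.read_keys) == 0, exactly as the new test does with KeyAccessLogger.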
diff --git a/gsshapy/grid/grid_to_gssha.py b/gsshapy/grid/grid_to_gssha.py
index <HASH>..<HASH> 100644
--- a/gsshapy/grid/grid_to_gssha.py
+++ b/gsshapy/grid/grid_to_gssha.py
@@ -579,7 +579,7 @@ class GRIDtoGSSHA(object):
# STEP 1: Get extent from GSSHA Grid in LSM coordinates
####
# reproject GSSHA grid and get bounds
- min_x, max_x, min_y, max_y = ggrid.bounds(as_projection=self.xd.lsm.projection)
+ min_x, max_x, min_y, max_y = self.gssha_grid.bounds(as_projection=self.xd.lsm.projection)
# set subset indices
self._set_subset_indices(min_y,
|
Fix reference to gssha_grid when getting bounds
|
py
|
diff --git a/zinnia/feeds.py b/zinnia/feeds.py
index <HASH>..<HASH> 100644
--- a/zinnia/feeds.py
+++ b/zinnia/feeds.py
@@ -55,6 +55,8 @@ class EntryFeed(Feed):
return current_site.domain
def item_enclosure_url(self, item):
+ if item.image:
+ return item.image.url
parser = ImgParser()
parser.feed(item.content)
if len(parser.img_locations):
|
use the image field of an entry for enclosure_url in feeds (sketched below)
|
py
|
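The zinnia change above prefers the entry's explicit image field and only falls back to scraping the rendered content for an <img> tag. A hedged sketch of that fallback order, with a simple regex standing in for zinnia's ImgParser and a generic entry object:

import re


def find_first_img(html_content):
    # naive fallback: first <img ... src="..."> in the rendered content
    match = re.search(r'<img[^>]+src="([^"]+)"', html_content)
    return match.group(1) if match else None


def item_enclosure_url(entry):
    # prefer the explicit image field; otherwise scan the content
    if getattr(entry, 'image', None):
        return entry.image.url
    return find_first_img(entry.content)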
diff --git a/tests/test_xml.py b/tests/test_xml.py
index <HASH>..<HASH> 100644
--- a/tests/test_xml.py
+++ b/tests/test_xml.py
@@ -45,4 +45,19 @@ class TestDataError(object):
with pytest.raises(overpy.exception.ElementDataWrongType):
overpy.Way.from_xml(
self._get_element_wrong_type()
- )
\ No newline at end of file
+ )
+
+ def test_way_missing_data(self):
+ import xml.etree.ElementTree as ET
+
+ # Node without ref attribute
+ data = """<way id="1234"><nd></nd></way>"""
+ node = ET.fromstring(data)
+ with pytest.raises(ValueError):
+ overpy.Way.from_xml(node)
+
+ # Tag without k attribute
+ data = """<way id="1234"><tag></tag></way>"""
+ node = ET.fromstring(data)
+ with pytest.raises(ValueError):
+ overpy.Way.from_xml(node)
|
test - Add xml tests with missing or wrong attributes
|
py
|
diff --git a/src/transformers/tokenization_xlnet.py b/src/transformers/tokenization_xlnet.py
index <HASH>..<HASH> 100644
--- a/src/transformers/tokenization_xlnet.py
+++ b/src/transformers/tokenization_xlnet.py
@@ -240,7 +240,7 @@ class XLNetTokenizer(PreTrainedTokenizer):
cls_segment_id = [2]
if token_ids_1 is None:
- return len(token_ids_0 + sep + cls) * [0]
+ return len(token_ids_0 + sep) * [0] + cls_segment_id
return len(token_ids_0 + sep) * [0] + len(token_ids_1 + sep) * [1] + cls_segment_id
def save_vocabulary(self, save_directory):
|
Correct segment ID for XLNet single sequence
|
py
|
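The tokenizer fix above matters because this tokenizer appends <sep> and <cls> at the end of the sequence and gives <cls> its own segment id (2); the old code wrongly labelled it 0. A toy illustration of the corrected single-sequence layout (lengths only, no real tokenizer involved):

def single_sequence_segment_ids(num_tokens):
    # tokens plus the trailing <sep> share segment 0; <cls> gets segment 2
    sep = 1
    cls_segment_id = [2]
    return [0] * (num_tokens + sep) + cls_segment_id


assert single_sequence_segment_ids(3) == [0, 0, 0, 0, 2]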
diff --git a/tensorforce/core/networks/__init__.py b/tensorforce/core/networks/__init__.py
index <HASH>..<HASH> 100755
--- a/tensorforce/core/networks/__init__.py
+++ b/tensorforce/core/networks/__init__.py
@@ -41,12 +41,14 @@ __all__ = [
'Dropout',
'Flatten',
'Pool2d',
+ 'Embedding',
'Linear',
'Dense',
'Dueling',
'Conv1d',
'Conv2d',
'InternalLstm',
+ 'Lstm',
'Network',
'LayerBasedNetwork',
'LayeredNetwork'
|
Added missing classes to __all__: Lstm & Embedding
|
py
|
diff --git a/test/reader_test.py b/test/reader_test.py
index <HASH>..<HASH> 100644
--- a/test/reader_test.py
+++ b/test/reader_test.py
@@ -18,13 +18,14 @@ def remove_multiple_white_space(lines):
def test_reader_test_data():
+ """Basic regression test"""
f_files = glob.glob("./test_data/*.f*")
f_files = [
f
for f in f_files
- if "expected" not in f and "bad" not in f # remove 'expected' files
- ] # remove 'bad files'
- # create_expected=True
+ if "expected" not in f and "bad" not in f # remove 'expected' and 'bad' files
+ ]
+ # Set to True to update the 'expected' files
create_expected = False
for ff in f_files:
ee = ff.replace(".f90", "_expected.f90")
|
Fix some comments/docstrings in test
|
py
|
diff --git a/py_zipkin/zipkin.py b/py_zipkin/zipkin.py
index <HASH>..<HASH> 100644
--- a/py_zipkin/zipkin.py
+++ b/py_zipkin/zipkin.py
@@ -34,6 +34,7 @@ STANDARD_ANNOTATIONS = {
'client': {'cs', 'cr'},
'server': {'ss', 'sr'},
}
+STANDARD_ANNOTATIONS_KEYS = frozenset(STANDARD_ANNOTATIONS.keys())
class zipkin_span(object):
@@ -154,10 +155,10 @@ class zipkin_span(object):
if self.sample_rate is not None and not (0.0 <= self.sample_rate <= 100.0):
raise ZipkinError('Sample rate must be between 0.0 and 100.0')
- if not set(include).issubset(set(STANDARD_ANNOTATIONS.keys())):
+ if not set(include).issubset(STANDARD_ANNOTATIONS_KEYS):
raise ZipkinError(
'Only %s are supported as annotations' %
- STANDARD_ANNOTATIONS.keys()
+ STANDARD_ANNOTATIONS_KEYS
)
else:
# get a list of all of the mapped annotations
|
move standard annotation keys generation outside of critical path
|
py
|
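The py_zipkin change above hoists an invariant computation, the frozenset of supported annotation kinds, out of the per-span constructor and into module scope. The same pattern in isolation, with a hypothetical validate_include() mirroring the constructor check:

STANDARD_ANNOTATIONS = {
    'client': {'cs', 'cr'},
    'server': {'ss', 'sr'},
}
# built once at import time instead of on every span creation
STANDARD_ANNOTATIONS_KEYS = frozenset(STANDARD_ANNOTATIONS)


def validate_include(include):
    if not set(include).issubset(STANDARD_ANNOTATIONS_KEYS):
        raise ValueError(
            'Only %s are supported as annotations' % sorted(STANDARD_ANNOTATIONS_KEYS)
        )

The saving per call is small, but the check sits on the path of every traced span, so avoiding the repeated set construction is worthwhile.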
diff --git a/galpy/df/quasiisothermaldf.py b/galpy/df/quasiisothermaldf.py
index <HASH>..<HASH> 100644
--- a/galpy/df/quasiisothermaldf.py
+++ b/galpy/df/quasiisothermaldf.py
@@ -1710,6 +1710,9 @@ class quasiisothermaldf(df):
R_pixel, z_pixel = the pixel size for creating the grid for
interpolation (in natural unit)
+
+ num_std = number of standard deviation to be considered outliers
+ sampled separately from interpolation
HISTORY:
@@ -1733,7 +1736,8 @@ class quasiisothermaldf(df):
R, z = outlier
vR, vT, vz = self.sampleV(R, z)[0]
outlier_coord_v[i] = numpy.array([R, z, vR, vT, vz])
-
+ print("shape of outlier coord v =", numpy.shape(outlier_coord_v))
+ print("outlier coord v =", outlier_coord_v)
#optimize the dimensions of the grid for interpolation
#get the minimum and maximum of each coordinate
R_min, z_min = numpy.min(normal, axis = 0)
|
added pixel sizes and num_std as inputs to sample v on set
|
py
|
diff --git a/emiz/miz.py b/emiz/miz.py
index <HASH>..<HASH> 100644
--- a/emiz/miz.py
+++ b/emiz/miz.py
@@ -212,7 +212,8 @@ class Miz:
try:
- with ZipFile(self.miz_path.absolute()) as zip_file:
+ path = str(self.miz_path.absolute())
+ with ZipFile(path) as zip_file:
LOGGER.debug('reading infolist')
|
fix: dev: got a weird bug on AV...
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name="django-ldapdb",
- version="0.4.0",
+ version="0.5.0",
description=u"An LDAP database backend for Django",
long_description=open('README.md').read(),
url="https://github.com/jlaine/django-ldapdb",
@@ -15,6 +15,8 @@ setup(
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Programming Language :: Python",
+ "Programming Language :: Python :: 2.7",
+ "Programming Language :: Python :: 3",
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
|
Release django-ldapdb <I>. This version adds official support for Python <I>/<I>, and for Django <I>.
|
py
|
diff --git a/highcharts/highcharts/highcharts.py b/highcharts/highcharts/highcharts.py
index <HASH>..<HASH> 100644
--- a/highcharts/highcharts/highcharts.py
+++ b/highcharts/highcharts/highcharts.py
@@ -10,7 +10,7 @@ from jinja2 import Environment, PackageLoader
import json, uuid
import re
import datetime
-import urllib2
+import urllib.request, urllib.error, urllib.parse
import html
from collections import Iterable
from .options import BaseOptions, ChartOptions, ColorAxisOptions, \
@@ -334,7 +334,7 @@ class Highchart(object):
if self.offline:
- opener = urllib2.build_opener()
+ opener = urllib.request.build_opener()
opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
self.header_css = [
|
Update urllib2 to the Python 3 version, relying on the already available 'future' package to provide the appropriate alias for use in Python 2 (#<I>)
|
py
|
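The highcharts change above is the standard urllib2-to-urllib.request migration; on Python 2 the commit relies on the 'future' package to provide the same module layout. A minimal Python 3 sketch of the opener usage (the URL is illustrative and the network call is left commented out):

import urllib.request

opener = urllib.request.build_opener()
opener.addheaders = [('User-Agent', 'Mozilla/5.0')]
# response = opener.open('https://code.highcharts.com/highcharts.js')
# body = response.read().decode('utf-8')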
diff --git a/pymongo/__init__.py b/pymongo/__init__.py
index <HASH>..<HASH> 100644
--- a/pymongo/__init__.py
+++ b/pymongo/__init__.py
@@ -32,7 +32,7 @@ SLOW_ONLY = 1
ALL = 2
"""Profile all operations."""
-version = "0.11.3"
+version = "0.12"
"""Current version of PyMongo."""
Connection = PyMongo_Connection
|
BUMP <I>: new ObjectId generation, better error message for reconnection, thread-safe GridFS, bugfixes
|
py
|
diff --git a/salt/modules/dnsmasq.py b/salt/modules/dnsmasq.py
index <HASH>..<HASH> 100644
--- a/salt/modules/dnsmasq.py
+++ b/salt/modules/dnsmasq.py
@@ -153,7 +153,7 @@ def _parse_dnamasq(filename):
else:
fileopts[comps[0]] = comps[1].strip()
else:
- if not 'unparsed' in fileopts:
+ if 'unparsed' not in fileopts:
fileopts['unparsed'] = []
fileopts['unparsed'].append(line)
return fileopts
|
Fix PEP8 E<I> - test for membership should be "not in"
|
py
|
diff --git a/rootpy/core.py b/rootpy/core.py
index <HASH>..<HASH> 100644
--- a/rootpy/core.py
+++ b/rootpy/core.py
@@ -3,6 +3,7 @@
"""
This module contains base classes defining core functionality
"""
+import os
import ROOT
import re
import uuid
@@ -10,6 +11,9 @@ import inspect
from . import rootpy_globals
+CONVERT_SNAKE_CASE = os.getenv('NO_ROOTPY_SNAKE_CASE', False) == False
+
+
class RequireFile(object):
def __init__(self):
@@ -33,14 +37,6 @@ class RequireFile(object):
return g
-def wrap_call(cls, method, *args, **kwargs):
- """
- Will provide more detailed info in the case that
- a method call on a ROOT object raises a TypeError
- """
- pass
-
-
class _repr_mixin:
def __str__(self):
@@ -87,6 +83,8 @@ def snake_case_methods(cls, debug=False):
A class decorator adding snake_case methods
that alias capitalized ROOT methods
"""
+ if not CONVERT_SNAKE_CASE:
+ return cls
# Fix both the class and its corresponding ROOT base class
#TODO use the class property on Object
root_base = cls.__bases__[-1]
|
Ability to switch off the snake_case aliasing via an environment variable (sketched below)
|
py
|
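The rootpy change above gates a class decorator behind an environment variable so users can opt out of the snake_case aliasing. A hedged sketch of that opt-out pattern with a hypothetical add_aliases decorator (not rootpy's actual snake_case_methods):

import os

# aliasing stays on unless the user exports NO_ALIASES=<anything>
CONVERT_ALIASES = os.getenv('NO_ALIASES') is None


def add_aliases(cls):
    if not CONVERT_ALIASES:
        return cls  # feature disabled: hand the class back untouched
    for name in list(vars(cls)):
        attr = getattr(cls, name)
        if callable(attr) and not name.startswith('_'):
            setattr(cls, name.lower(), attr)
    return cls

Comparing os.getenv(...) to None also avoids the slightly awkward `== False` comparison used in the diff.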
diff --git a/pypeerassets/pa.py b/pypeerassets/pa.py
index <HASH>..<HASH> 100644
--- a/pypeerassets/pa.py
+++ b/pypeerassets/pa.py
@@ -5,15 +5,16 @@ from pypeerassets import paproto, pautils, RpcNode
class Deck:
- def __init__(self, version, name, number_of_decimals, issue_mode):
+ def __init__(self, version, name, number_of_decimals, issue_mode, asset_id=None):
'''initialize deck object, load from dictionary Deck(**dict) or initilize with kwargs Deck(1, "deck", 3, 2)'''
self.version = version # protocol version
self.name = name # deck name
self.issue_mode = issue_mode # deck issue mode
+ self.asset_id = None
@property
- def to_protobuf(self):
+ def metainfo_to_protobuf(self):
'''encode deck into protobuf'''
deck = paproto.DeckSpawn()
@@ -25,7 +26,7 @@ class Deck:
return deck.SerializeToString()
@property
- def to_dict(self):
+ def metainfo_to_dict(self):
'''encode deck into dictionary'''
return {
|
pa: asset_id property, rename Deck properties to reflect exact usage.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -42,7 +42,6 @@ setup(
"Topic :: Software Development :: Version Control",
"Topic :: Utilities"
],
- bugtrack_url = 'https://github.com/msiemens/PyGitUp/issues',
long_description = README
# could also include download_url etc.
|
Removed bugtrack_url from setup.py
|
py
|
diff --git a/synapse/tests/test_lib_cli.py b/synapse/tests/test_lib_cli.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_lib_cli.py
+++ b/synapse/tests/test_lib_cli.py
@@ -1,7 +1,6 @@
-
+import threading
import unittest.mock as mock
-
import synapse.exc as s_exc
import synapse.lib.cli as s_cli
import synapse.tests.common as s_test
@@ -277,3 +276,14 @@ class CliTest(s_test.SynTest):
self.true(outp.expect('ZeroDivisionError'))
self.true(outp.expect('<ctrl-c>'))
self.true(cli.isfini)
+
+ def test_cli_fini_disconnect(self):
+ evt = threading.Event()
+ outp = self.getTestOutp()
+ with self.getTestDmon('dmonboot') as dmon:
+ with dmon._getTestProxy('echo00') as prox:
+ cli = s_cli.Cli(prox, outp)
+ cli.onfini(evt.set)
+ self.true(evt.wait(2))
+ self.true(cli.isfini)
+ self.true(outp.expect('connection closed...'))
|
Add a test to show that prox disconnection causes fini
|
py
|