diff
stringlengths 139
3.65k
| message
stringlengths 8
627
| diff_languages
stringclasses 1
value |
---|---|---|
diff --git a/GPy/util/mocap.py b/GPy/util/mocap.py
index <HASH>..<HASH> 100644
--- a/GPy/util/mocap.py
+++ b/GPy/util/mocap.py
@@ -2,7 +2,6 @@ import os
import numpy as np
import math
from GPy.util import datasets as dat
-import urllib2
class vertex:
def __init__(self, name, id, parents=[], children=[], meta = {}):
|
Removed import urllib2 since it wasn't being used
|
py
|
diff --git a/ads/core.py b/ads/core.py
index <HASH>..<HASH> 100644
--- a/ads/core.py
+++ b/ads/core.py
@@ -29,10 +29,17 @@ class Article(object):
aff = ["Unknown"]
author = ["Anonymous"]
citation_count = 0
+ reference_count = 0
url = None
def __init__(self, **kwargs):
for key, value in kwargs.iteritems():
+ # It's not Pythonic to use '[citations]' as an attribute
+ if key == "[citations]":
+ if "num_references" in value:
+ setattr(self, "reference_count", value["num_references"])
+ continue
+
setattr(self, key, value)
if "bibcode" in kwargs:
|
Removed [citations] as an attribute for Article, introduced article.reference_count
|
py
|
diff --git a/openquake/calculators/extract.py b/openquake/calculators/extract.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/extract.py
+++ b/openquake/calculators/extract.py
@@ -1012,6 +1012,7 @@ def extract_disagg(dstore, what):
if not allnames:
raise KeyError('No data for ' + what)
elif len(allnames) == 1:
+ qdict['names'] = allnames
return ArrayWrapper(values, qdict)
else:
qdict['names'] = allnames
|
Fixed make_figure_disagg [skip CI] Former-commit-id: b<I>f<I>c5c4a<I>d<I>ede<I>cb<I>fb<I>d9b7
|
py
|
diff --git a/run_tests.py b/run_tests.py
index <HASH>..<HASH> 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -52,7 +52,8 @@ def RunTests(debug_mode=False):
# related to securityd parsing, yet the tests fail, but not if run
# independently, this has something to do with the test suite.
blacklisted_casses = [
- 'plaso.parsers.pcap_test', 'plaso.parsers.mac_securityd_test']
+ 'plaso.parsers.pcap_test', 'plaso.parsers.mac_securityd_test',
+ 'plaso.frontend.preg_test']
tests = None
for test_file in sorted(FindTestFiles()):
|
Fix for Travis CI test configuration.
|
py
|
diff --git a/tornado/curl_httpclient.py b/tornado/curl_httpclient.py
index <HASH>..<HASH> 100644
--- a/tornado/curl_httpclient.py
+++ b/tornado/curl_httpclient.py
@@ -319,17 +319,7 @@ class CurlAsyncHTTPClient(AsyncHTTPClient):
self.io_loop.add_callback(request.streaming_callback, chunk)
else:
write_function = buffer.write
- if bytes is str: # py2
- curl.setopt(pycurl.WRITEFUNCTION, write_function)
- else: # py3
- # Upstream pycurl doesn't support py3, but ubuntu 12.10 includes
- # a fork/port. That version has a bug in which it passes unicode
- # strings instead of bytes to the WRITEFUNCTION. This means that
- # if you use a WRITEFUNCTION (which tornado always does), you cannot
- # download arbitrary binary data. This needs to be fixed in the
- # ported pycurl package, but in the meantime this lambda will
- # make it work for downloading (utf8) text.
- curl.setopt(pycurl.WRITEFUNCTION, lambda s: write_function(utf8(s)))
+ curl.setopt(pycurl.WRITEFUNCTION, write_function)
curl.setopt(pycurl.FOLLOWLOCATION, request.follow_redirects)
curl.setopt(pycurl.MAXREDIRS, request.max_redirects)
curl.setopt(pycurl.CONNECTTIMEOUT_MS, int(1000 * request.connect_timeout))
|
CurlAsyncHTTPClient: remove ubuntu-<I> python3 fix No need to utf8() what is passed to WRITEFUNCTION, it will always be bytes on python3 for upstream pycurl on python3. The ubuntu-<I> variant is long gone.
|
py
|
diff --git a/tests/contrib/test_securecookie.py b/tests/contrib/test_securecookie.py
index <HASH>..<HASH> 100644
--- a/tests/contrib/test_securecookie.py
+++ b/tests/contrib/test_securecookie.py
@@ -1,3 +1,4 @@
+from werkzeug import Request, Response, parse_cookie
from werkzeug.contrib.securecookie import SecureCookie
@@ -14,6 +15,7 @@ def test_basic_support():
s = c.serialize()
c2 = SecureCookie.unserialize(s, 'foo')
+ assert c is not c2
assert not c2.new
assert not c2.modified
assert not c2.should_save
@@ -23,3 +25,21 @@ def test_basic_support():
assert not c3.modified
assert not c3.new
assert c3 == {}
+
+
+def test_wrapper_support():
+ """Securecookie wrapper integration"""
+ req = Request.from_values()
+ resp = Response()
+ c = SecureCookie.load_cookie(req, secret_key='foo')
+ assert c.new
+ c['foo'] = 42
+ assert c.secret_key == 'foo'
+ c.save_cookie(resp)
+
+ req = Request.from_values(headers={
+ 'Cookie': 'session="%s"' % parse_cookie(resp.headers['set-cookie'])['session']
+ })
+ c2 = SecureCookie.load_cookie(req, secret_key='foo')
+ assert not c2.new
+ assert c2 == c
|
Added another secure cookie test, this time for the wrapper integration.
|
py
|
diff --git a/tests/calculation_tests.py b/tests/calculation_tests.py
index <HASH>..<HASH> 100644
--- a/tests/calculation_tests.py
+++ b/tests/calculation_tests.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import unittest
from pycpfcnpj import calculation as calc
|
another try to make python 3 compatible
|
py
|
diff --git a/valid_vietnamese.py b/valid_vietnamese.py
index <HASH>..<HASH> 100644
--- a/valid_vietnamese.py
+++ b/valid_vietnamese.py
@@ -105,7 +105,11 @@ def is_valid_combination(components, final_form=True):
# break
# if not good_vowel:
# return False
- return mark.remove_mark_string(vowel) in STRIPPED_VOWELS
+ accentless_vowel = accent.remove_accent_string(vowel)
+ stripped_vowel = mark.remove_mark_string(accentless_vowel)
+ last_consonant = comps[2]
+ return (last_consonant == "" and stripped_vowel in STRIPPED_VOWELS) or \
+ (last_consonant != "" and accentless_vowel in CLOSED_VOWELS)
# 'ăch'?
if comps[2] == 'ch' and ((vowel in 'ăâeôơuư') or
|
Improve vowel checking in is_valid_vietnamese() If the last consonant exists then the vowel must be a closed vowel else it can be any of the stripped vowels.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,6 +2,8 @@
"""trakt.py setup script (powered by pbr)."""
+from __future__ import absolute_import, division, print_function
+
from setuptools import setup
import sys
|
Added missing `__future__` imports in [setup.py]
|
py
|
diff --git a/examples/add_model.py b/examples/add_model.py
index <HASH>..<HASH> 100644
--- a/examples/add_model.py
+++ b/examples/add_model.py
@@ -11,6 +11,7 @@ from juju import utils
from juju.controller import Controller
import asyncio
from logging import getLogger
+import uuid
LOG = getLogger(__name__)
@@ -21,7 +22,8 @@ async def main():
await controller.connect_current()
try:
- model_name = "quux"
+ model_name = "addmodeltest-{}".format(uuid.uuid4())
+ print("Adding model {}".format(model_name))
model = await controller.add_model(model_name)
print('Deploying ubuntu')
@@ -49,6 +51,10 @@ async def main():
print("Destroying model")
await controller.destroy_model(model.info.uuid)
+ except Exception as e:
+ LOG.exception(
+ "Test failed! Model {} may not be cleaned up".format(model_name))
+
finally:
print('Disconnecting from controller')
await model.disconnect()
|
Create a new model for each test run of examples/add_model.py.
|
py
|
diff --git a/djangoql/admin.py b/djangoql/admin.py
index <HASH>..<HASH> 100644
--- a/djangoql/admin.py
+++ b/djangoql/admin.py
@@ -40,13 +40,16 @@ class DjangoQLSearchMixin(object):
# enable search mode toggle
return self.search_fields != DjangoQLSearchMixin.search_fields
+ def djangoql_search_enabled(self, request):
+ return request.GET.get(DJANGOQL_SEARCH_MARKER, '').lower() == 'on'
+
def get_changelist(self, *args, **kwargs):
return DjangoQLChangeList
def get_search_results(self, request, queryset, search_term):
if (
self.search_mode_toggle_enabled() and
- request.GET.get(DJANGOQL_SEARCH_MARKER, '').lower() != 'on'
+ not self.djangoql_search_enabled(request)
):
return super(DjangoQLSearchMixin, self).get_search_results(
request=request,
|
extract method djangoql_search_enabled
|
py
|
diff --git a/src/sos/hosts.py b/src/sos/hosts.py
index <HASH>..<HASH> 100755
--- a/src/sos/hosts.py
+++ b/src/sos/hosts.py
@@ -376,7 +376,7 @@ class RemoteHost(object):
# The following is not a robust solution to #1300 but I cannot think of a better method.
env_vars = ' '.join([
f'{x}={os.environ[x]}' for x in os.environ.keys()
- if not any(y in os.environ[x] for y in (' ', '"', "'"))
+ if not any(y in os.environ[x] for y in (' ', '"', "'", '(', ')', '&', '|'))
])
return self.config.get(
'execute_cmd', 'ssh ' + self.cm_opts +
|
Exclude more environment variables passed to remote hosts #<I>
|
py
|
diff --git a/ipyrad/assemble/refmap.py b/ipyrad/assemble/refmap.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/refmap.py
+++ b/ipyrad/assemble/refmap.py
@@ -702,10 +702,12 @@ def bam_region_to_fasta(data, sample, proc1, chrom, region_start, region_end):
ref += line
## parse sam to fasta. Save ref location to name.
+ ## Set size= an improbably large value so the REF sequence
+ ## sorts to the top for muscle aligning.
try:
name, seq = ref.strip().split("\n", 1)
seq = "".join(seq.split("\n"))
- fasta = ["{}_REF;+\n{}".format(name, seq)]
+ fasta = ["{}_REF;size={};+\n{}".format(name, seq, 10000)]
except ValueError as inst:
LOGGER.error("ref failed to parse - {}".format(ref))
LOGGER.error(" ".join(cmd1))
|
Set a size= for reference sequence to sort it to the top of the chunk prior to muscle aligning.
|
py
|
diff --git a/nipap/nipap/backend.py b/nipap/nipap/backend.py
index <HASH>..<HASH> 100644
--- a/nipap/nipap/backend.py
+++ b/nipap/nipap/backend.py
@@ -46,6 +46,7 @@
^^^^^^^^^^^^^^^^^
* :attr:`id` - ID number of the prefix.
* :attr:`prefix` - The IP prefix itself.
+ * :attr:`prefix_length` - Prefix length of the prefix.
* :attr:`display_prefix` - A more user-friendly version of the prefix.
* :attr:`family` - Address family (integer 4 or 6). Set by NIPAP.
* :attr:`vrf_id` - ID of the VRF which the prefix belongs to.
|
docs: Added prefix_length attribute Added the prefix_length attribute to the documentation.
|
py
|
diff --git a/blended/__main__.py b/blended/__main__.py
index <HASH>..<HASH> 100644
--- a/blended/__main__.py
+++ b/blended/__main__.py
@@ -568,7 +568,10 @@ def build_files(outdir):
line = line.replace("{relative_root}", relative_path)
for i in range(len(plugins)):
if plugins[i][0] != "RUN":
- main = __import__(plugins[i][0])
+ if sys.version_info[0] < 2:
+ main = importlib.import_module(plugins[i][0])
+ elif sys.version_info[0] < 3:
+ main = __import__(plugins[i][0])
content = main.main()
line = line.replace("{"+plugins[i][0]+"}", content)
else:
|
fixed problems relating to importlib for <I>
|
py
|
diff --git a/glue/ligolw/metaio.py b/glue/ligolw/metaio.py
index <HASH>..<HASH> 100644
--- a/glue/ligolw/metaio.py
+++ b/glue/ligolw/metaio.py
@@ -2,10 +2,6 @@ __author__ = "Kipp Cannon <[email protected]>"
__date__ = "$Date$"
__version__ = "$Revision$"
-try:
- import numarray
-except:
- pass
import re
import sys
from xml import sax
@@ -214,6 +210,7 @@ class Column(ligolw.Column):
# if the list like object has 0 length, causing numarray to
# barf. If the object is, in fact, a real Python list then
# numarray is made happy.
+ import numarray
if not len(self):
return numarray.array([], type = ToNumArrayType[self.getAttribute("Type")], shape = (len(self),))
return numarray.array(self, type = ToNumArrayType[self.getAttribute("Type")], shape = (len(self),))
|
Move import of numarray into one piece of code that needs it --- speeds up module load significantly for the normal case in which numarray isn't needed.
|
py
|
diff --git a/hsreplay/__init__.py b/hsreplay/__init__.py
index <HASH>..<HASH> 100644
--- a/hsreplay/__init__.py
+++ b/hsreplay/__init__.py
@@ -2,6 +2,6 @@
__author__ = "Jerome Leclanche"
__email__ = "[email protected]"
-__version__ = "1.1"
+__version__ = "1.2"
SYSTEM_DTD = "https://hearthsim.info/hsreplay/dtd/hsreplay-%s.dtd" % (__version__)
|
Update DTD version to <I>
|
py
|
diff --git a/gitlint/__init__.py b/gitlint/__init__.py
index <HASH>..<HASH> 100644
--- a/gitlint/__init__.py
+++ b/gitlint/__init__.py
@@ -1 +1 @@
-__version__ = "0.11.0dev"
+__version__ = "0.11.0"
|
<I> release - Python <I> support - Python <I> no longer supported - Various dependency updates and under the hood fixes (see #<I>) Special
|
py
|
diff --git a/vstutils/management/commands/newproject.py b/vstutils/management/commands/newproject.py
index <HASH>..<HASH> 100644
--- a/vstutils/management/commands/newproject.py
+++ b/vstutils/management/commands/newproject.py
@@ -20,7 +20,7 @@ class Command(BaseCommand):
files_to_create = {
'frontend_src': {
'app': {
- 'index': Path('index.js')
+ 'index.js': Path('index.js')
},
'.editorconfig': Path('.editorconfig'),
'.eslintrc.js': Path('.eslintrc.js'),
|
Fix: new project generation with index.js.
|
py
|
diff --git a/exchangelib/items.py b/exchangelib/items.py
index <HASH>..<HASH> 100644
--- a/exchangelib/items.py
+++ b/exchangelib/items.py
@@ -225,7 +225,7 @@ class Item(RegisterMixIn):
send_meeting_invitations=send_meeting_invitations
)
assert self.item_id == item_id
- assert self.changekey != changekey
+ # Don't assert that changekeys are different. No-op saves will sometimes leave the changekey intact
self.changekey = changekey
else:
if update_fields:
@@ -502,7 +502,7 @@ class CalendarItem(Item):
BooleanField('allow_new_time_proposal', field_uri='calendar:AllowNewTimeProposal', default=None,
is_required_after_save=True, is_searchable=False),
BooleanField('is_online_meeting', field_uri='calendar:IsOnlineMeeting', default=None,
- is_required_after_save=True),
+ is_read_only=True),
URIField('meeting_workspace_url', field_uri='calendar:MeetingWorkspaceUrl'),
URIField('net_show_url', field_uri='calendar:NetShowUrl'),
]
|
IsOnlineMeeting is read-only (at least in Office<I>). Fixes #<I>
|
py
|
diff --git a/salt/cache/consul.py b/salt/cache/consul.py
index <HASH>..<HASH> 100644
--- a/salt/cache/consul.py
+++ b/salt/cache/consul.py
@@ -4,6 +4,8 @@ Minion data cache plugin for Consul key/value data store.
.. versionadded:: 2016.11.2
+:depends: python-consul >= 0.2.0
+
It is up to the system administrator to set up and configure the Consul
infrastructure. All is needed for this plugin is a working Consul agent
with a read-write access to the key-value store.
@@ -81,8 +83,11 @@ def __virtual__():
'verify': __opts__.get('consul.verify', True),
}
- global api
- api = consul.Consul(**consul_kwargs)
+ try:
+ global api
+ api = consul.Consul(**consul_kwargs)
+ except AttributeError:
+ return (False, "Failed to invoke consul.Consul, please make sure you have python-consul >= 0.2.0 installed")
return __virtualname__
|
Fix #<I>, better handling of consul initialization issues
|
py
|
diff --git a/views.py b/views.py
index <HASH>..<HASH> 100644
--- a/views.py
+++ b/views.py
@@ -139,8 +139,7 @@ def request_record(f):
record=record,
tabs=tabs,
title=title,
- get_mini_reviews=lambda *args, **kwargs:
- get_mini_reviews(*args, **kwargs).decode('utf8'),
+ get_mini_reviews=get_mini_reviews,
collection=collection,
format_record=_format_record
)
|
records: fix mini reviews display NOTE: beware, patch amended by Tibor to use new file names
|
py
|
diff --git a/pyrogram/client/types/list.py b/pyrogram/client/types/list.py
index <HASH>..<HASH> 100644
--- a/pyrogram/client/types/list.py
+++ b/pyrogram/client/types/list.py
@@ -27,6 +27,6 @@ class List(list):
return Object.__str__(self)
def __repr__(self):
- return "pyrogram.client.types.pyrogram_list.PyrogramList([{}])".format(
+ return "pyrogram.client.types.list.List([{}])".format(
",".join(Object.__repr__(i) for i in self)
)
|
Fix deserialization of pretty-printable lists
|
py
|
diff --git a/test/unit/Utilities/VertexOpsTest.py b/test/unit/Utilities/VertexOpsTest.py
index <HASH>..<HASH> 100644
--- a/test/unit/Utilities/VertexOpsTest.py
+++ b/test/unit/Utilities/VertexOpsTest.py
@@ -49,8 +49,8 @@ class VertexOpsTest:
sp.ones([self.geo.num_pores(), 3])) == 0.0
self.net['pore.coords'] = temp_coords
- def test_plot_pore(self):
- vo.plot_pore(self.geo, self.geo.pores())
+ # def test_plot_pore(self):
+ # vo.plot_pore(self.geo, self.geo.pores())
- def test_plot_throat(self):
- vo.plot_throat(self.geo, [1, 2, 3, 4])
+ # def test_plot_throat(self):
+ # vo.plot_throat(self.geo, [1, 2, 3, 4])
|
Committing an unspeakable sin...remove two tests that seem to be breaking Travis due to deprecation warnings coming from Matplotlib. Don't tell Brennan!
|
py
|
diff --git a/telemetry/telemetry/page/gtest_test_results.py b/telemetry/telemetry/page/gtest_test_results.py
index <HASH>..<HASH> 100644
--- a/telemetry/telemetry/page/gtest_test_results.py
+++ b/telemetry/telemetry/page/gtest_test_results.py
@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+import logging
import sys
import time
import unittest
@@ -58,6 +59,14 @@ class GTestTestResults(page_test_results.PageTestResults):
'(%0.f ms)' % self._GetMs())
sys.stdout.flush()
+ def addSkip(self, test, reason):
+ super(GTestTestResults, self).addSkip(test, reason)
+ test_name = GTestTestResults._formatTestname(test)
+ logging.warning('===== SKIPPING TEST %s: %s =====', test_name, reason)
+ print >> self._output_stream, '[ OK ]', test_name, (
+ '(%0.f ms)' % self._GetMs())
+ sys.stdout.flush()
+
def PrintSummary(self):
unit = 'test' if len(self.successes) == 1 else 'tests'
print >> self._output_stream, '[ PASSED ]', (
|
[telemetry] Print [ OK ] on skipped tests, the same as we do for successes. BUG=<I> TEST=I'm sure the trybots have got it covered. Review URL: <URL>
|
py
|
diff --git a/synapse/datamodel.py b/synapse/datamodel.py
index <HASH>..<HASH> 100644
--- a/synapse/datamodel.py
+++ b/synapse/datamodel.py
@@ -208,7 +208,7 @@ class Univ(PropBase):
('univ:re', (self.name, valu, {})),
)
- iops = self.type.getIndxOps(valu)
+ iops = self.type.getIndxOps(valu, cmpr)
return (
('indx', ('byuniv', self.pref, iops)),
@@ -310,7 +310,7 @@ class Form:
('form:re', (self.name, valu, {})),
)
- iops = self.type.getIndxOps(valu, cmpr=cmpr)
+ iops = self.type.getIndxOps(valu, cmpr)
return (
('indx', ('byprop', self.pref, iops)),
)
|
Pass cmpr in for Univ prop lifts; remove unneeded kwarg for another lops get
|
py
|
diff --git a/cloudshell/devices/standards/firewall/configuration_attributes_structure.py b/cloudshell/devices/standards/firewall/configuration_attributes_structure.py
index <HASH>..<HASH> 100644
--- a/cloudshell/devices/standards/firewall/configuration_attributes_structure.py
+++ b/cloudshell/devices/standards/firewall/configuration_attributes_structure.py
@@ -126,6 +126,20 @@ class GenericFirewallResource(object):
return self.attributes.get("{}SNMP V3 Private Key".format(self.namespace_prefix), None)
@property
+ def snmp_v3_auth_protocol(self):
+ """
+ :rtype: str
+ """
+ return self.attributes.get("{}SNMP V3 Authentication Protocol".format(self.namespace_prefix), None)
+
+ @property
+ def snmp_v3_priv_protocol(self):
+ """
+ :rtype: str
+ """
+ return self.attributes.get("{}SNMP V3 Privacy Protocol".format(self.namespace_prefix), None)
+
+ @property
def snmp_version(self):
"""
:rtype: str
|
Added snmpv3 attributes to firewall
|
py
|
diff --git a/modopt/opt/gradient.py b/modopt/opt/gradient.py
index <HASH>..<HASH> 100644
--- a/modopt/opt/gradient.py
+++ b/modopt/opt/gradient.py
@@ -33,6 +33,8 @@ class GradParent(object):
Method for calculating the cost (default is ``None``)
data_type : type, optional
Expected data type of the input data (default is ``None``)
+ verbose : bool, optional
+ Option for verbose output (default is ``True``)
Examples
--------
@@ -55,8 +57,9 @@ class GradParent(object):
"""
def __init__(self, data, op, trans_op, get_grad=None, cost=None,
- data_type=None):
+ data_type=None, verbose=True):
+ self.verbose = verbose
self._grad_data_type = data_type
self.obs_data = data
self.op = op
@@ -85,7 +88,8 @@ class GradParent(object):
if self._grad_data_type in (float, np.floating):
data = check_float(data)
- check_npndarray(data, dtype=self._grad_data_type, writeable=False)
+ check_npndarray(data, dtype=self._grad_data_type, writeable=False,
+ verbose=self.verbose)
self._obs_data = data
|
Adding verbose attribute to GradParent to solve issue #<I> (#<I>) * Adding verbose attribute to GradParent to solve issue #<I> * Improved the attribute docstring * Fixed correct order of the attributes of GradParent.
|
py
|
diff --git a/asteval/asteval.py b/asteval/asteval.py
index <HASH>..<HASH> 100644
--- a/asteval/asteval.py
+++ b/asteval/asteval.py
@@ -238,7 +238,7 @@ class Interpreter(object):
if len(self.error) > 0 and not isinstance(node, ast.Module):
msg = '%s' % msg
err = ExceptionHolder(node, exc=exc, msg=msg, expr=expr, lineno=lineno)
- self._interrupt = ast.Break()
+ self._interrupt = ast.Raise()
self.error.append(err)
if self.error_msg is None:
self.error_msg = "at expr='%s'" % (self.expr)
@@ -274,6 +274,10 @@ class Interpreter(object):
out = None
if len(self.error) > 0:
return out
+ if self.retval is not None:
+ return self.retval
+ if isinstance(self._interrupt, (ast.Break, ast.Continue)):
+ return self._interrupt
if node is None:
return out
if isinstance(node, str):
|
return without executing more of this node on return/break/continue
|
py
|
diff --git a/sos/plugins/docker.py b/sos/plugins/docker.py
index <HASH>..<HASH> 100644
--- a/sos/plugins/docker.py
+++ b/sos/plugins/docker.py
@@ -54,7 +54,7 @@ class Docker(Plugin):
class RedHatDocker(Docker, RedHatPlugin):
- packages = ('docker-io',)
+ packages = ('docker', 'docker-io')
def setup(self):
super(RedHatDocker, self).setup()
|
[docker] add 'docker' to the package list for Red Hat distros The docker package is named 'docker-io' in Fedora and 'docker' in RHEL and other downstream products. Add the 'docker' name to the package list in RedHatDocker to ensure the plugin runs.
|
py
|
diff --git a/qiskit/__init__.py b/qiskit/__init__.py
index <HASH>..<HASH> 100644
--- a/qiskit/__init__.py
+++ b/qiskit/__init__.py
@@ -33,6 +33,10 @@ from .result import Result
import qiskit.extensions.standard
import qiskit.extensions.quantum_initializer
+# Allow extending this namespace. Please note that currently this line needs
+# to be placed *before* the wrapper imports or any non-import code.
+__path__ = pkgutil.extend_path(__path__, __name__)
+
# Import circuit drawing methods by default
# This is wrapped in a try because the Travis tests fail due to non-framework
# Python build since using pyenv
@@ -41,10 +45,6 @@ try:
except (ImportError, RuntimeError) as expt:
print("Error: {0}".format(expt))
-# Allow extending this namespace. Please note that currently this line needs
-# to be placed *before* the wrapper imports.
-__path__ = pkgutil.extend_path(__path__, __name__)
-
from .wrapper._wrapper import (
available_backends, local_backends, remote_backends,
get_backend, compile, execute, register, unregister,
|
Fix main __init__ py instruction order (#<I>)
|
py
|
diff --git a/warehouse/packaging/views.py b/warehouse/packaging/views.py
index <HASH>..<HASH> 100644
--- a/warehouse/packaging/views.py
+++ b/warehouse/packaging/views.py
@@ -85,8 +85,8 @@ def release_detail(release, request):
.all()
)
- # Get the latest non-prerelease version of this Project, or the latest
- # of any version if there are no non-prerelease versions.
+ # Get the latest non-prerelease of this Project, or the latest release if
+ # all releases are prereleases.
latest_release = first(
all_releases,
key=lambda r: not r.is_prerelease,
|
Small clarification to comment (#<I>)
|
py
|
diff --git a/tests/helpers/test_dict.py b/tests/helpers/test_dict.py
index <HASH>..<HASH> 100644
--- a/tests/helpers/test_dict.py
+++ b/tests/helpers/test_dict.py
@@ -309,6 +309,8 @@ class TestDictHelper(unittest.TestCase):
self.assertEqual(expected, actual)
+ output_file.close()
+
os.remove(output_file.name)
def test_from_json_file_not_json(self) -> None:
@@ -327,6 +329,8 @@ class TestDictHelper(unittest.TestCase):
self.assertEqual(expected, actual)
+ output_file.close()
+
os.remove(output_file.name)
def test_to_json(self) -> None:
@@ -392,6 +396,8 @@ class TestDictHelper(unittest.TestCase):
self.assertEqual(expected, actual)
+ output_file.close()
+
os.remove(output_file.name)
def test_to_yaml(self) -> None:
|
fixup! Fix permission issue under windows.
|
py
|
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index <HASH>..<HASH> 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.8.0a3"
+__version__ = "3.8.0a4"
from typing import Tuple
|
Bump to <I>a4
|
py
|
diff --git a/visidata/vdtui.py b/visidata/vdtui.py
index <HASH>..<HASH> 100755
--- a/visidata/vdtui.py
+++ b/visidata/vdtui.py
@@ -290,15 +290,15 @@ def anytype(r=None):
anytype.__name__ = ''
option('float_chars', '+-0123456789.eE_', 'valid numeric characters')
-def currency(s):
+def currency(s=''):
'a `float` with any leading and trailing non-numeric characters stripped'
floatchars = options.float_chars
if isinstance(s, str):
- while s[0] not in floatchars:
+ while s and s[0] not in floatchars:
s = s[1:]
- while s[-1] not in floatchars:
+ while s and s[-1] not in floatchars:
s = s[:-1]
- return float(s)
+ return float(s) if s else float()
class date:
'`datetime` wrapper, constructing from time_t or from str with dateutil.parse'
@@ -315,6 +315,7 @@ class date:
self.dt = s.dt
else:
assert isinstance(s, datetime.datetime), (type(s), s)
+ self.dt = s
def to_string(self, fmtstr=None):
'Convert datetime object to string, in ISO 8601 format by default.'
|
Fix currency to accept empty string, and date to accept datetime
|
py
|
diff --git a/distutils/tests/test_config_cmd.py b/distutils/tests/test_config_cmd.py
index <HASH>..<HASH> 100644
--- a/distutils/tests/test_config_cmd.py
+++ b/distutils/tests/test_config_cmd.py
@@ -1,5 +1,4 @@
"""Tests for distutils.command.config."""
-import unittest
import os
import sys
from test.support import missing_compiler_executable
@@ -35,7 +34,7 @@ class TestConfig(support.LoggingSilencer, support.TempdirManager):
dump_file(this_file, 'I am the header')
assert len(self._logs) == numlines + 1
- @unittest.skipIf(sys.platform == 'win32', "can't test on Windows")
+ @pytest.mark.skipif('platform.system() == "Windows"')
def test_search_cpp(self):
cmd = missing_compiler_executable(['preprocessor'])
if cmd is not None:
|
Prefer pytest for skip
|
py
|
diff --git a/angr/project.py b/angr/project.py
index <HASH>..<HASH> 100644
--- a/angr/project.py
+++ b/angr/project.py
@@ -392,7 +392,7 @@ class Project(object):
return False
if self.analyzed('CFG'):
- return self.analyze('CFG').cfg.is_thumb_addr(addr)
+ return self.analyze('CFG').is_thumb_addr(addr)
# What binary is that ?
obj = self.binary_by_addr(addr)
|
removed .cfg so things don't crash when ARM functions again
|
py
|
diff --git a/src/resource-graph/azext_resourcegraph/custom.py b/src/resource-graph/azext_resourcegraph/custom.py
index <HASH>..<HASH> 100644
--- a/src/resource-graph/azext_resourcegraph/custom.py
+++ b/src/resource-graph/azext_resourcegraph/custom.py
@@ -25,6 +25,7 @@ from .vendored_sdks.resourcegraph.models import \
__ROWS_PER_PAGE = 1000
__CACHE_FILE_NAME = ".azgraphcache"
__CACHE_KEY = "query_extension"
+__SUBSCRIPTION_LIMIT = 1000
__logger = get_logger(__name__)
@@ -32,6 +33,15 @@ def execute_query(client, graph_query, first, skip, subscriptions, include):
# type: (ResourceGraphClient, str, int, int, list[str], str) -> object
subs_list = subscriptions or _get_cached_subscriptions()
+
+ if len(subs_list) > __SUBSCRIPTION_LIMIT:
+ subs_list = subs_list[:__SUBSCRIPTION_LIMIT]
+ warning_message = "The query included more subscriptions than allowed. "\
+ "Only the first {0} subscriptions were included for the results. "\
+ "To use more than {0} subscriptions, "\
+ "see the docs for examples: https://aka.ms/arg-error-toomanysubs".format(__SUBSCRIPTION_LIMIT)
+ __logger.warning(warning_message)
+
results = []
skip_token = None
full_query = graph_query
|
Resource Graph: show warning when too many subscriptions (#<I>)
|
py
|
diff --git a/trakt/client.py b/trakt/client.py
index <HASH>..<HASH> 100644
--- a/trakt/client.py
+++ b/trakt/client.py
@@ -5,6 +5,7 @@ from trakt.request import TraktRequest
import logging
import requests
+import socket
log = logging.getLogger(__name__)
@@ -49,8 +50,27 @@ class TraktClient(object):
prepared = request.prepare()
- # TODO retrying requests on 502, 503 errors
- return self._session.send(prepared)
+ # TODO retrying requests on 502, 503 errors?
+
+ try:
+ return self._session.send(prepared)
+ except socket.gaierror, e:
+ code, _ = e
+
+ if code != 8:
+ raise e
+
+ log.warn('Encountered socket.gaierror (code: 8)')
+
+ return self._rebuild().send(prepared)
+
+ def _rebuild(self):
+ log.info('Rebuilding session and connection pools...')
+
+ # Rebuild the connection pool (old pool has stale connections)
+ self._session = requests.Session()
+
+ return self._session
def __getitem__(self, path):
parts = path.strip('/').split('/')
|
Rebuild session on socket.gaierror (code: 8)
|
py
|
diff --git a/lancet/commands/repository.py b/lancet/commands/repository.py
index <HASH>..<HASH> 100644
--- a/lancet/commands/repository.py
+++ b/lancet/commands/repository.py
@@ -9,9 +9,11 @@ from ..helpers import get_issue, get_transition, set_issue_status, get_branch
@click.command()
@click.option('--base', '-b', 'base_branch')
[email protected]('-s', '--stop-timer/--no-stop-timer', default=False,
+ help='Stop the Harvest timer after creating the pull request.')
@click.option('-o', '--open-pr/--no-open-pr', default=False)
@click.pass_context
-def pull_request(ctx, base_branch, open_pr):
+def pull_request(ctx, base_branch, open_pr, stop_timer):
"""Create a new pull request for this issue."""
lancet = ctx.obj
@@ -96,9 +98,10 @@ def pull_request(ctx, base_branch, open_pr):
# TODO: Post to HipChat?
# Stop harvest timer
- with taskstatus('Pausing harvest timer') as ts:
- lancet.timer.pause()
- ts.ok('Harvest timer paused')
+ if stop_timer:
+ with taskstatus('Pausing harvest timer') as ts:
+ lancet.timer.pause()
+ ts.ok('Harvest timer paused')
# Open the pull request page in the browser if requested
if open_pr:
|
Stop the Harvest timer after creating the PR by default
|
py
|
diff --git a/pluginmanager/compat.py b/pluginmanager/compat.py
index <HASH>..<HASH> 100644
--- a/pluginmanager/compat.py
+++ b/pluginmanager/compat.py
@@ -21,10 +21,8 @@ if is_py3:
import importlib
def load_source(name, file_path):
- spec = importlib.util.spec_from_file_location(name,
- file_path)
- module = spec.loader.load_module()
- return module
+ loader = importlib.machinery.SourceFileLoader(name, file_path)
+ return loader.load_module()
else:
# flake8: noqa
import imp
|
changed way using importlib in compat
|
py
|
diff --git a/openquake/engine/calculators/risk/base.py b/openquake/engine/calculators/risk/base.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/calculators/risk/base.py
+++ b/openquake/engine/calculators/risk/base.py
@@ -116,6 +116,12 @@ def run_risk(sorted_assocs, calc, monitor):
with get_assets_mon:
assets = models.ExposureData.objects.get_asset_chunk(
exposure_model, time_event, assocs)
+ if not assets:
+ # NB: this may happen if the user provides a wrong time_event;
+ # the check should be done at the exposure parsing time and
+ # it will done that way in the future
+ raise RuntimeError('Could not find any asset for taxonomy=%s, '
+ 'time_event=%s' % (taxonomy, time_event))
for it in models.ImtTaxonomy.objects.filter(
job=calc.job, taxonomy=taxonomy):
imt = it.imt.imt_str
|
Raise an error for wrong time events
|
py
|
diff --git a/spyder/plugins/editor/panels/scrollflag.py b/spyder/plugins/editor/panels/scrollflag.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/panels/scrollflag.py
+++ b/spyder/plugins/editor/panels/scrollflag.py
@@ -50,7 +50,7 @@ class ScrollFlagArea(Panel):
"""This property holds the vertical offset of the scroll flag area
relative to the top of the text editor."""
vsb = self.editor.verticalScrollBar()
- style = vsb.style()
+ style = QApplication.instance().style()
opt = QStyleOptionSlider()
vsb.initStyleOption(opt)
@@ -172,7 +172,7 @@ class ScrollFlagArea(Panel):
"""Return the pixel span height of the scrollbar area in which
the slider handle may move"""
vsb = self.editor.verticalScrollBar()
- style = vsb.style()
+ style = QApplication.instance().style()
opt = QStyleOptionSlider()
vsb.initStyleOption(opt)
|
Fetch style from QApplication This is required for the Dark Style
|
py
|
diff --git a/pgcontents/tests/test_pgcontents_api.py b/pgcontents/tests/test_pgcontents_api.py
index <HASH>..<HASH> 100644
--- a/pgcontents/tests/test_pgcontents_api.py
+++ b/pgcontents/tests/test_pgcontents_api.py
@@ -64,7 +64,7 @@ class _APITestBase(APITest):
Test ContentsManager.walk.
"""
results = {
- dname: (subdirs, files)
+ _norm_unicode(dname): (subdirs, files)
for dname, subdirs, files in walk(self.notebook.contents_manager)
}
# This is a dictionary because the ordering of these is all messed up
@@ -118,7 +118,7 @@ class _APITestBase(APITest):
}
for dname, (subdirs, files) in iteritems(expected):
- result_subdirs, result_files = results.pop(dname)
+ result_subdirs, result_files = results.pop(_norm_unicode(dname))
if dname == '':
sep = ''
else:
|
TEST: Unicode is not my favorite method of text representation.
|
py
|
diff --git a/tests/test_page.py b/tests/test_page.py
index <HASH>..<HASH> 100644
--- a/tests/test_page.py
+++ b/tests/test_page.py
@@ -108,7 +108,7 @@ def test_seed_url_keywords_multiple_params(base_url, driver):
assert "key={}".format(value[1]) in seed_url
import re
- assert re.match("{}\?key=(foo|bar)&key=(foo|bar)".format(base_url), seed_url)
+ assert re.match(r"{}\?key=(foo|bar)&key=(foo|bar)".format(base_url), seed_url)
def test_seed_url_keywords_multiple_params_special(base_url, driver):
@@ -120,7 +120,7 @@ def test_seed_url_keywords_multiple_params_special(base_url, driver):
import re
assert re.match(
- "{}\?key=(foo|mozilla%26co)&key=(foo|mozilla%26co)".format(base_url), seed_url
+ r"{}\?key=(foo|mozilla%26co)&key=(foo|mozilla%26co)".format(base_url), seed_url
)
|
Stab at fixing flake8/pyflakes
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -121,7 +121,8 @@ setup(
],
include_package_data=True,
zip_safe=False,
- scripts=[ # This is separate from the other console scripts just for efficiency's
+ scripts=[
+ # This is separate from the other console scripts just for efficiency's
# sake. It gets called over and over and over again by our mediawiki
# plugin/mod_php. By making it *not* a setuptools console_script it
# does a lot less IO work to stand up.
|
setup.py: Fix PEP8 violation
|
py
|
diff --git a/fmn/rules/fedora_elections.py b/fmn/rules/fedora_elections.py
index <HASH>..<HASH> 100644
--- a/fmn/rules/fedora_elections.py
+++ b/fmn/rules/fedora_elections.py
@@ -1,4 +1,4 @@
-def fedora_electionsi_candidate_delete(config, message):
+def fedora_elections_candidate_delete(config, message):
""" Elections: A candidate was deleted from an election.
Adding this rule will let through elections from `Fedora
|
fix typo in fedora_elections
|
py
|
diff --git a/cassiopeia/type/core/common.py b/cassiopeia/type/core/common.py
index <HASH>..<HASH> 100644
--- a/cassiopeia/type/core/common.py
+++ b/cassiopeia/type/core/common.py
@@ -274,6 +274,8 @@ class Season(enum.Enum):
season_4 = "SEASON2014"
preseason_5 = "PRESEASON2015"
season_5 = "SEASON2015"
+ preseason_6 = "PRESEASON2016"
+ season_6 = "SEASON2016"
stats_seasons = {Season.season_3, Season.season_4, Season.season_5}
|
update season enum for <I>
|
py
|
diff --git a/realtime/test/test_utilities.py b/realtime/test/test_utilities.py
index <HASH>..<HASH> 100644
--- a/realtime/test/test_utilities.py
+++ b/realtime/test/test_utilities.py
@@ -20,7 +20,7 @@ __copyright__ = ('Copyright 2012, Australia Indonesia Facility for '
import os
import logging
import unittest
-from datetime import date
+import datetime
from realtime.utilities import (
base_data_dir,
@@ -87,8 +87,8 @@ class UtilsTest(unittest.TestCase):
#noinspection PyMethodMayBeStatic
def test_logging(self):
path = os.path.join(log_dir(), 'realtime.log')
- current_date = date.today()
- date_string = current_date.strftime('%d-%m-%Y-%s')
+ current_date = datetime.datetime.now()
+ date_string = current_date.strftime('%d-%m-%Y-%H:%M:%S')
message = 'Testing logger %s' % date_string
LOGGER.info(message)
log_file = open(path)
|
[Realtime] A better logging tests in realtime.
|
py
|
diff --git a/glances/processes.py b/glances/processes.py
index <HASH>..<HASH> 100644
--- a/glances/processes.py
+++ b/glances/processes.py
@@ -400,15 +400,17 @@ class GlancesProcesses(object):
s = self.__get_process_stats(proc,
mandatory_stats=True,
standard_stats=self.max_processes is None)
+ # Check if s is note None (issue #879)
# ignore the 'idle' process on Windows and *BSD
# ignore the 'kernel_task' process on OS X
# waiting for upstream patch from psutil
- if (BSD and s['name'] == 'idle' or
+ if (s is None or
+ BSD and s['name'] == 'idle' or
WINDOWS and s['name'] == 'System Idle Process' or
OSX and s['name'] == 'kernel_task'):
continue
# Continue to the next process if it has to be filtered
- if s is None or self._filter.is_filtered(s):
+ if self._filter.is_filtered(s):
excluded_processes.add(proc)
continue
|
Correct TypeError on processlist in MacOS (issue #<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -32,6 +32,7 @@ if __name__ == "__main__":
author_email="[email protected]",
url="https://github.com/Salamek/cron-descriptor",
long_description=long_description,
+ long_description_content_type='text/markdown',
packages=setuptools.find_packages(),
package_data={
'cron_descriptor': [
|
Tell PyPI the long_description is markdown As per [packaging tutorial](<URL>) (see bottom). This should render it as markdown on <URL>
|
py
|
diff --git a/lol_scraper/match_downloader.py b/lol_scraper/match_downloader.py
index <HASH>..<HASH> 100644
--- a/lol_scraper/match_downloader.py
+++ b/lol_scraper/match_downloader.py
@@ -48,7 +48,8 @@ def get_patch_changed():
finally:
patch_changed_lock.release()
-cache_autostore(version_key, 60*60, cache, on_change=set_patch_changed)
+
+@cache_autostore(version_key, 60 * 60, cache, on_change=set_patch_changed)
def get_last_patch_version():
version_extended = baseriotapi.get_versions()[0]
version = ".".join(version_extended.split(".")[:2])
|
Fixed a bug where cache_autostore was not used as a wrapper
|
py
|
diff --git a/tests/test_profiling.py b/tests/test_profiling.py
index <HASH>..<HASH> 100644
--- a/tests/test_profiling.py
+++ b/tests/test_profiling.py
@@ -40,7 +40,7 @@ class TestProfiling(GPflowTestCase):
m = self.prepare()
s = gpflow.settings.get_settings()
s.profiling.dump_timeline = True
- s.profiling.output_directory = os.path.dirname(__file__)
+ s.profiling.output_directory = '/tmp/'
s.profiling.output_file_name = 'test_trace_autoflow'
with gpflow.settings.temp_settings(s):
|
Change temporary file directory for profiling tests.
|
py
|
diff --git a/scripts/make_dist.py b/scripts/make_dist.py
index <HASH>..<HASH> 100644
--- a/scripts/make_dist.py
+++ b/scripts/make_dist.py
@@ -314,7 +314,7 @@ def is_valid(filename):
for arc, wfunc in (dist1, dist1.add), (dist2, dist2.add), (dist3, dist3.write):
- for path in 'README.txt', 'LICENCE.txt':
+ for path in 'README.md', 'LICENCE.txt':
wfunc(os.path.join(pdir, path), arcname=os.path.join(name, path))
wfunc(os.path.join(pdir, 'src', 'brython.js'),
|
In make_dist, replace README.txt by README.md
|
py
|
diff --git a/project_generator/project.py b/project_generator/project.py
index <HASH>..<HASH> 100644
--- a/project_generator/project.py
+++ b/project_generator/project.py
@@ -405,10 +405,10 @@ class Project:
@staticmethod
def _generate_output_dir(path):
"""this is a separate function, so that it can be more easily tested."""
+ relpath = os.path.relpath(os.getcwd(),path)
+ count = relpath.count(os.sep) + 1
- count = path.count(os.sep) + 1
-
- return (os.sep.join('..' for _ in range(count)) + os.sep), count
+ return relpath+os.path.sep, count
def _set_output_dir(self):
path = self.project['output_dir']['path']
|
Changes relpath calculation for generated project files. Closes #<I>
|
py
|
diff --git a/fastfood/book.py b/fastfood/book.py
index <HASH>..<HASH> 100644
--- a/fastfood/book.py
+++ b/fastfood/book.py
@@ -40,8 +40,7 @@ class CookBook(object):
def metadata(self):
"""Return dict representation of this cookbook's metadata.rb ."""
if not self._metadata:
- with open(self.metadata_path) as meta:
- self._metadata = MetadataRb(meta)
+ self._metadata = MetadataRb(open(self.metadata_path, 'r+'))
return self._metadata
@property
@@ -51,8 +50,7 @@ class CookBook(object):
if not os.path.isfile(self.berks_path):
raise ValueError("No Berksfile found at %s"
% self.berks_path)
- with open(self.berks_path) as berks:
- self._berksfile = Berksfile(berks)
+ self._berksfile = Berksfile(open(self.berks_path, 'r+'))
return self._berksfile
|
using with statement causes file to close Fixes IOErrors and opens the files in read+write mode
|
py
|
diff --git a/lib/websession_webinterface.py b/lib/websession_webinterface.py
index <HASH>..<HASH> 100644
--- a/lib/websession_webinterface.py
+++ b/lib/websession_webinterface.py
@@ -660,7 +660,7 @@ class WebInterfaceYourAccountPages(WebInterfaceDirectory):
if len(roles) == 1:
# There's only one role enabled to see this collection
# Let's redirect to log to it!
- return redirect_to_url(req, '%s%s' % (CFG_SITE_SECURE_URL, make_canonical_urlargd({'realm' : roles[0][0], 'referer' : args['referer']}, {})))
+ return redirect_to_url(req, '%s/%s' % (CFG_SITE_SECURE_URL, make_canonical_urlargd({'realm' : roles[0][0], 'referer' : args['referer']}, {})))
except InvenioWebAccessMailCookieError:
pass
|
Fixed a url redirection.
|
py
|
diff --git a/scoop/_control.py b/scoop/_control.py
index <HASH>..<HASH> 100644
--- a/scoop/_control.py
+++ b/scoop/_control.py
@@ -121,7 +121,14 @@ def runFuture(future):
uniqueReference)
try:
future.resultValue = future.callable(*future.args, **future.kargs)
- except Exception as err:
+ except BaseException as err:
+ import traceback
+ scoop.logger.error(
+ "The following error occurend on a worker:\n{err}\n{tb}".format(
+ err=err,
+ tb=traceback.format_exc(),
+ )
+ )
future.exceptionValue = err
future.executionTime = future.stopWatch.get()
future.isDone = True
|
* Improved handling and logging of errors happening on workers.
|
py
|
diff --git a/instabot/api/api.py b/instabot/api/api.py
index <HASH>..<HASH> 100644
--- a/instabot/api/api.py
+++ b/instabot/api/api.py
@@ -482,10 +482,10 @@ class API(object):
def get_total_followers_or_followings(self, user_id, amount=None, which='followers'):
if which == 'followers':
key = 'follower_count'
- get = getattr(self, 'get_user_followers')
+ get = self.get_user_followers
elif which == 'followings':
key = 'following_count'
- get = getattr(self, 'get_user_followings')
+ get = self.get_user_followings
sleep_track = 0
result = []
|
don't use 'getattr' when we can call the methods directly
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -12,7 +12,7 @@ def read(fname):
setup(
name='unleash',
- version='0.4.dev1',
+ version='0.3.2.dev1',
description=('Creates release commits directly in git, unleashes them on '
'PyPI and pushes tags to github.'),
long_description=read('README.rst'),
|
Increased version to <I>.dev1 after release of <I>. Commit using `unleash <I>dev <<URL>
|
py
|
diff --git a/salt/modules/schedule.py b/salt/modules/schedule.py
index <HASH>..<HASH> 100644
--- a/salt/modules/schedule.py
+++ b/salt/modules/schedule.py
@@ -55,7 +55,8 @@ SCHEDULE_CONF = [
'until',
'after',
'return_config',
- 'return_kwargs'
+ 'return_kwargs',
+ 'run_on_start'
]
|
Add run_on_start to SCHEDULE_CONF (#<I>) This is a fix for <I>.
|
py
|
diff --git a/dockerpty/pty.py b/dockerpty/pty.py
index <HASH>..<HASH> 100644
--- a/dockerpty/pty.py
+++ b/dockerpty/pty.py
@@ -121,7 +121,7 @@ class PseudoTerminal(object):
"""
Present the PTY of the container inside the current process.
- If the container is not running, an IOError is raised.
+ The container must be started before this method is invoked.
This will take over the current process' TTY until the container's PTY
is closed.
|
Don't specify behaviour when container isn't started
|
py
|
diff --git a/pymola/backends/casadi/generator.py b/pymola/backends/casadi/generator.py
index <HASH>..<HASH> 100644
--- a/pymola/backends/casadi/generator.py
+++ b/pymola/backends/casadi/generator.py
@@ -627,7 +627,7 @@ def generate(ast_tree: ast.Collection, model_name: str) -> Model:
component_ref = ast.ComponentRef.from_string(model_name)
ast_walker = TreeWalker()
flat_tree = flatten(ast_tree, component_ref)
- component_ref_tuple = component_ref.to_tuple(component_ref)
+ component_ref_tuple = component_ref.to_tuple()
casadi_gen = Generator(flat_tree, component_ref_tuple[-1])
ast_walker.walk(casadi_gen, flat_tree)
return casadi_gen.model
|
CasADi generator: Fix conflict introduced by rebase.
|
py
|
diff --git a/machina/apps/forum/abstract_models.py b/machina/apps/forum/abstract_models.py
index <HASH>..<HASH> 100644
--- a/machina/apps/forum/abstract_models.py
+++ b/machina/apps/forum/abstract_models.py
@@ -97,7 +97,6 @@ class AbstractForum(MPTTModel, ActiveModel):
('can_vote_in_polls', _('Can vote in polls')),
('can_change_existing_vote', ('Can change existing vote')),
# Moderation
- ('can_edit_topics', _('Can edit topics')),
('can_close_topics', _('Can close topics')),
('can_move_topics', _('Can move topics')),
('can_edit_posts', _('Can edit posts')),
|
'can_edit_topics' permission removed from Forum per-object permissions
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,8 +6,27 @@ mydir = os.path.dirname(__file__)
if mydir:
os.chdir(mydir)
+version = '0.4'
+base_url = 'https://github.com/dranjan/python-plyfile'
+
setup(name='plyfile',
- version='0.4',
+ author='Darsh Ranjan',
+ version=version,
install_requires=['numpy>=1.8'],
description='PLY file reader/writer',
- py_modules=['plyfile'])
+ url=base_url,
+ download_url=('%s/releases/tag/v%s' % (base_url, version)),
+ license='GPLv3+',
+ classifiers=[
+ 'Programming Language :: Python :: 2',
+ 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
+ 'Programming Language :: Python :: 3.4',
+ 'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
+ 'Operating System :: OS Independent',
+ 'Development Status :: 4 - Beta',
+ 'Topic :: Scientific/Engineering'
+ ],
+ py_modules=['plyfile'],
+ keywords=['ply', 'numpy'])
|
Add missing setup arguments (preparing for submission to PyPI)
|
py
|
diff --git a/storage/google/cloud/storage/bucket.py b/storage/google/cloud/storage/bucket.py
index <HASH>..<HASH> 100644
--- a/storage/google/cloud/storage/bucket.py
+++ b/storage/google/cloud/storage/bucket.py
@@ -421,9 +421,9 @@ class Bucket(_PropertyMixin):
STANDARD_STORAGE_CLASS,
NEARLINE_STORAGE_CLASS,
COLDLINE_STORAGE_CLASS,
- MULTI_REGIONAL_LEGACY_STORAGE_CLASS, # deprecated
- REGIONAL_LEGACY_STORAGE_CLASS, # deprecated
- DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, # deprecated
+ MULTI_REGIONAL_LEGACY_STORAGE_CLASS, # legacy
+ REGIONAL_LEGACY_STORAGE_CLASS, # legacy
+ DURABLE_REDUCED_AVAILABILITY_LEGACY_STORAGE_CLASS, # legacy
)
"""Allowed values for :attr:`storage_class`.
|
Mark old storage classes as legacy, not deprecated. (#<I>)
|
py
|
diff --git a/django_q/models.py b/django_q/models.py
index <HASH>..<HASH> 100644
--- a/django_q/models.py
+++ b/django_q/models.py
@@ -134,7 +134,7 @@ class Schedule(models.Model):
url = reverse('admin:django_q_success_change', args=(task.id,))
else:
url = reverse('admin:django_q_failure_change', args=(task.id,))
- return '<a href="{}">[{}]</a>'.format(url, self.task)
+ return '<a href="{}">[{}]</a>'.format(url, task.name)
return None
def __unicode__(self):
|
Schedule admin should show name instead of id in task link
|
py
|
diff --git a/ppb/sprites.py b/ppb/sprites.py
index <HASH>..<HASH> 100644
--- a/ppb/sprites.py
+++ b/ppb/sprites.py
@@ -156,8 +156,7 @@ class BaseSprite(EventMixin):
resource_path = None
position: Vector = Vector(0, 0)
facing: Vector = Vector(0, -1)
- _size: Union[int, float] = 1
- _offset_value = None
+ size: Union[int, float] = 1
def __init__(self, **kwargs):
super().__init__()
@@ -224,13 +223,8 @@ class BaseSprite(EventMixin):
self.position.y = value - self._offset_value
@property
- def size(self) -> Union[int, float]:
- return self._size
-
- @size.setter
- def size(self, value: Union[int, float]):
- self._size = value
- self._offset_value = self._size / 2
+ def _offset_value(self):
+ return self.size / 2
def rotate(self, degrees: Number):
self.facing.rotate(degrees)
|
Replaces size property with a calculated _offset_value property.
|
py
|
diff --git a/_data.py b/_data.py
index <HASH>..<HASH> 100644
--- a/_data.py
+++ b/_data.py
@@ -1669,10 +1669,12 @@ class fitter():
f = self._evaluate_all_functions(self._xdata_massaged, p)
# get the full residuals list
- r = []
+ residuals = []
for n in range(len(f)):
- r.append((self._ydata_massaged[n]-f[n]) / _n.absolute(self._eydata_massaged[n]))
- return r
+ numerator = self._ydata_massaged[n]-f[n]
+ denominator = _n.absolute(self._eydata_massaged[n])
+ residuals.append(numerator/denominator)
+ return residuals
def _studentized_residuals_concatenated(self, p=None):
"""
|
changed "r" to "residuals" for the returned studentized residuals list. Split up a fraction into variables to make it easier to read by reducing the number of ()'s and line length.
|
py
|
diff --git a/mmcv/runner/hooks/evaluation.py b/mmcv/runner/hooks/evaluation.py
index <HASH>..<HASH> 100644
--- a/mmcv/runner/hooks/evaluation.py
+++ b/mmcv/runner/hooks/evaluation.py
@@ -55,7 +55,10 @@ class EvalHook(Hook):
rule_map = {'greater': lambda x, y: x > y, 'less': lambda x, y: x < y}
init_value_map = {'greater': -inf, 'less': inf}
- greater_keys = ['acc', 'top', 'AR@', 'auc', 'precision', 'mAP']
+ greater_keys = [
+ 'acc', 'top', 'AR@', 'auc', 'precision', 'mAP', 'mDice', 'mIoU',
+ 'mAcc', 'aAcc'
+ ]
less_keys = ['loss']
def __init__(self,
|
Add segmentation keys for greater_keys. (#<I>)
|
py
|
diff --git a/pycompilation/dist.py b/pycompilation/dist.py
index <HASH>..<HASH> 100644
--- a/pycompilation/dist.py
+++ b/pycompilation/dist.py
@@ -194,7 +194,7 @@ class clever_build_ext(build_ext.build_ext):
if ext.logger: ext.logger.info("Copying files needed for distribution..")
for f, rel_dst in ext.dist_files:
- rel_dst = rel_dst or os.path.basename(f)
+ rel_dst = rel_dst or os.path.dirname(f)
copy(
f,
os.path.join(
|
Keep relative path structure of dist_files
|
py
|
diff --git a/validator/sawtooth_validator/gossip/permission_verifier.py b/validator/sawtooth_validator/gossip/permission_verifier.py
index <HASH>..<HASH> 100644
--- a/validator/sawtooth_validator/gossip/permission_verifier.py
+++ b/validator/sawtooth_validator/gossip/permission_verifier.py
@@ -516,8 +516,9 @@ class IdentityCache():
value = self._identity_view.get_role(item)
return value
- value = self._cache.get(item)
- if value is None:
+ if item in self._cache:
+ value = self._cache.get(item)
+ else:
if self._identity_view is None:
self.update_view(state_root)
value = self._identity_view.get_policy(item)
|
Fix IdentityCache existence check Fixes how the IdentityCache checks if an item has already been retrieved. Previously, the cache assumed that if the value in the cache was None, the item was never retrieved; instead, if the item is in the cache, the item has already been retrieved (even if the value is None), so we don't need to read from state.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -345,7 +345,9 @@ password = [
'flask-bcrypt>=0.7.1',
]
pinot = [
- 'pinotdb>=0.1.1,<1.0.0',
+ # pinotdb v0.1.1 may still work with older versions of Apache Pinot, but we've confirmed that it
+ # causes a problem with newer versions.
+ 'pinotdb>0.1.2,<1.0.0',
]
plexus = [
'arrow>=0.16.0',
|
Limit old versions of pinotdb to force update on CI (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -7,7 +7,7 @@ requirements = []
with open('requirements.txt') as f:
requirements = f.read().splitlines()
-version = '0.1.0'
+version = '0.2.0'
readme = ''
with open('README.md') as f:
|
version updated I'm bad at version control
|
py
|
diff --git a/python/mxnet/optimizer.py b/python/mxnet/optimizer.py
index <HASH>..<HASH> 100644
--- a/python/mxnet/optimizer.py
+++ b/python/mxnet/optimizer.py
@@ -1090,7 +1090,7 @@ class AdaGrad(Optimizer):
Parameters
----------
eps: float, optional
- Small value to avoid division by 0.
+ Initial value of the history accumulator. Avoids division by 0.
"""
def __init__(self, eps=1e-7, **kwargs):
|
Document AdaGrad eps as initial history accumulator value (#<I>)
|
py
|
diff --git a/devassistant/commands.py b/devassistant/commands.py
index <HASH>..<HASH> 100644
--- a/devassistant/commands.py
+++ b/devassistant/commands.py
@@ -139,10 +139,10 @@ class DotDevassistantCommand(object):
original_path_as_dict = {}
for i, subas in enumerate(original_assistant_path):
original_path_as_dict[settings.SUBASSISTANT_N_STRING.format(i)] = subas
- from devassistant.bin import CreatorAssistant
+ from devassistant.bin import TopAssistant
from devassistant import yaml_assistant
try:
- path = CreatorAssistant().get_selected_subassistant_path(**original_path_as_dict)
+ path = TopAssistant().get_selected_subassistant_path(**original_path_as_dict)
except exceptions.AssistantNotFoundException as e:
path = []
logger.warning(str(e))
|
When installing dependencies from .devassistant, use correct top-level assistant
|
py
|
diff --git a/cornice_sphinx/__init__.py b/cornice_sphinx/__init__.py
index <HASH>..<HASH> 100644
--- a/cornice_sphinx/__init__.py
+++ b/cornice_sphinx/__init__.py
@@ -301,6 +301,7 @@ def rst2node(data):
document.settings.rfc_references = False
document.settings.character_level_inline_markup = False
document.settings.env = Env()
+ document.settings.env.ref_context = {}
parser.parse(data, document)
if len(document.children) == 1:
return document.children[0]
|
Initialize the ref_context as a dictionary. fixes #7
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@ THIS_DIR = abspath(dirname(__file__))
REQUIRES = [
"python-dateutil~=2.8.0",
"requests~=2.23.0",
- "python-slugify~=3.0.2",
+ "python-slugify~=4.0.0",
]
PACKAGES = find_packages(exclude=['tests*', 'docs'])
|
Bump slugify to <I>
|
py
|
diff --git a/numina/core/oresult.py b/numina/core/oresult.py
index <HASH>..<HASH> 100644
--- a/numina/core/oresult.py
+++ b/numina/core/oresult.py
@@ -41,6 +41,7 @@ class ObservationResult(object):
self.configuration = 'default'
self.prodid = None
self.tags = {}
+ self.results = {}
def update_with_product(self, prod):
self.tags = prod.tags
@@ -88,6 +89,9 @@ def obsres_from_dict(values):
obsres.instrument = values['instrument']
obsres.configuration = values.get('configuration', 'default')
obsres.pipeline = values.get('pipeline', 'default')
+ obsres.children = values.get('children', [])
+ obsres.parent = values.get('parent', None)
+ obsres.results = values.get('results', {})
try:
obsres.frames = [dataframe_from_list(val) for val in values[ikey]]
except Exception:
|
Add a dictionary to contain the IDs of the results of other observations
|
py
|
diff --git a/estnltk/storage/postgres/collection.py b/estnltk/storage/postgres/collection.py
index <HASH>..<HASH> 100755
--- a/estnltk/storage/postgres/collection.py
+++ b/estnltk/storage/postgres/collection.py
@@ -640,7 +640,7 @@ class PgCollection:
for row in data_iterator:
text_id, text = row[0], row[1]
for record in row_mapper(row):
- fragment_dict = layer_to_dict(record['fragment'], text)
+ fragment_dict = layer_to_dict(record['fragment'])
parent_layer_id = record['parent_id']
if ngram_index is not None:
ngram_values = [create_ngram_fingerprint_index(record.layer, attr, n)
@@ -875,7 +875,7 @@ class PgCollection:
record = row_mapper(row)
layer = record.layer
- layer_dict = layer_to_dict(layer, text)
+ layer_dict = layer_to_dict(layer)
layer_json = json.dumps(layer_dict, ensure_ascii=False)
values = [collection_id, collection_id, layer_json]
|
remove text parameter from layer_to_dict
|
py
|
diff --git a/django_ses/__init__.py b/django_ses/__init__.py
index <HASH>..<HASH> 100644
--- a/django_ses/__init__.py
+++ b/django_ses/__init__.py
@@ -92,12 +92,7 @@ class SESBackend(BaseEmailBackend):
def close(self):
"""Close any open HTTP connections to the API server.
"""
- try:
- self.connection.close()
- self.connection = None
- except Exception:
- if not self.fail_silently:
- raise
+ self.connection = None
def send_messages(self, email_messages):
"""Sends one or more EmailMessage objects and returns the number of
|
Fix close connection error. (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -9,11 +9,9 @@ setup(
author="Beau Barker",
author_email="[email protected]",
classifiers=[
- "Programming Language :: Python :: 2.7",
- "Programming Language :: Python :: 3.3",
- "Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
+ "Programming Language :: Python :: 3.7",
],
description="Send JSON-RPC requests",
entry_points={"console_scripts": ["jsonrpc = jsonrpcclient.__main__:main"]},
@@ -34,7 +32,7 @@ setup(
"zmq": ["pyzmq"],
},
include_package_data=True,
- install_requires=["future<1", "jsonschema>2,<3", "click>6,<7"],
+ install_requires=["jsonschema>2,<3", "click>6,<7"],
license="MIT",
long_description=README,
long_description_content_type="text/markdown",
|
Remove unsupported pythons from classifiers
|
py
|
diff --git a/andes/core/symprocessor.py b/andes/core/symprocessor.py
index <HASH>..<HASH> 100644
--- a/andes/core/symprocessor.py
+++ b/andes/core/symprocessor.py
@@ -142,6 +142,8 @@ class SymProcessor:
self.lambdify_func[0]['Indicator'] = lambda x: x
self.lambdify_func[0]['imag'] = np.imag
self.lambdify_func[0]['real'] = np.real
+ self.lambdify_func[0]['im'] = np.imag
+ self.lambdify_func[0]['re'] = np.real
# build ``non_vars_dict`` by removing ``vars_dict`` keys from a copy of ``inputs``
self.non_vars_dict = OrderedDict(self.inputs_dict)
@@ -222,6 +224,8 @@ class SymProcessor:
# convert service equations
# Service equations are converted sequentially due to possible dependency
+ import inspect
+
s_args = OrderedDict()
s_syms = OrderedDict()
s_calls = OrderedDict()
|
Added `re` and `im` as custom functions.
|
py
|
diff --git a/tests/test_ctl.py b/tests/test_ctl.py
index <HASH>..<HASH> 100644
--- a/tests/test_ctl.py
+++ b/tests/test_ctl.py
@@ -32,7 +32,7 @@ def test_rw_config():
@patch('patroni.ctl.load_config',
- Mock(return_value={'scope': 'alpha', 'postgresql': {'data_dir': '.', 'parameters': {}, 'retry_timeout': 5},
+ Mock(return_value={'scope': 'alpha', 'postgresql': {'data_dir': '.', 'pgpass': './pgpass', 'parameters': {}, 'retry_timeout': 5},
'restapi': {'listen': '::', 'certfile': 'a'}, 'etcd': {'host': 'localhost:2379'}}))
class TestCtl(unittest.TestCase):
|
Set postgresql.pgpass to ./pgpass (#<I>) This avoids test failures if $HOME is not available (fixes: #<I>).
|
py
|
diff --git a/gbdxtools/images/worldview.py b/gbdxtools/images/worldview.py
index <HASH>..<HASH> 100644
--- a/gbdxtools/images/worldview.py
+++ b/gbdxtools/images/worldview.py
@@ -66,7 +66,10 @@ class WVImage(IpeImage):
def parts(self):
if self._parts is None:
self._parts = [IdahoImage(rec['properties']['attributes']['idahoImageId'],
- product=self.options["product"], proj=self.options["proj"], gsd=self.options["gsd"])
+ product=self.options["product"],
+ proj=self.options["proj"],
+ bucket=rec['properties']['attributes']['bucketName']
+ gsd=self.options["gsd"])
for rec in self._find_parts(self.cat_id, self.options["band_type"])]
return self._parts
|
adding bucket name to idaho image inits
|
py
|
diff --git a/bcbio/variation/validate.py b/bcbio/variation/validate.py
index <HASH>..<HASH> 100644
--- a/bcbio/variation/validate.py
+++ b/bcbio/variation/validate.py
@@ -115,7 +115,7 @@ def _normalize_cwl_inputs(items):
vrn_files.append(data["vrn_file"])
ready_items.append(data)
if len(with_validate) == 0:
- data = _pick_lead_item(items)
+ data = _pick_lead_item(ready_items)
data["batch_samples"] = batch_samples
return data
else:
|
CWL: pass normalized objects to lead item picking for ensemble
|
py
|
diff --git a/AppiumLibrary/keywords/_touch.py b/AppiumLibrary/keywords/_touch.py
index <HASH>..<HASH> 100644
--- a/AppiumLibrary/keywords/_touch.py
+++ b/AppiumLibrary/keywords/_touch.py
@@ -106,7 +106,7 @@ class _TouchKeywords(KeywordGroup):
driver.execute_script("mobile: scroll", {"direction": 'up', 'element': element.id})
def long_press(self, locator, duration=1000):
- """ Long press the element """
+ """ Long press the element with optional duration """
driver = self._current_application()
element = self._element_find(locator, True, True)
long_press = TouchAction(driver).long_press(element, duration)
|
added duration as another argument for long_press method, also added to description
|
py
|
diff --git a/testkitbackend/requests.py b/testkitbackend/requests.py
index <HASH>..<HASH> 100644
--- a/testkitbackend/requests.py
+++ b/testkitbackend/requests.py
@@ -305,7 +305,7 @@ def ResultConsume(backend, data):
"protocolVersion":
".".join(map(str, summary.server.protocol_version)),
"agent": summary.server.agent,
- # "address": ":".join(map(str, summary.server.address))
+ "address": ":".join(map(str, summary.server.address)),
}
})
|
TestKit backend: add address field to Summary.serverInfo (#<I>)
|
py
|
diff --git a/rarfile.py b/rarfile.py
index <HASH>..<HASH> 100644
--- a/rarfile.py
+++ b/rarfile.py
@@ -94,7 +94,24 @@ from hashlib import sha1
# only needed for encryped headers
try:
- from Crypto.Cipher import AES
+ try:
+ from cryptography.hazmat.primitives.ciphers import algorithms, modes, Cipher
+ from cryptography.hazmat.backends import default_backend
+ class AES_CBC_Decrypt:
+ block_size = 16
+ def __init__(self, key, iv):
+ ciph = Cipher(algorithms.AES(key), modes.CBC(iv), default_backend())
+ self.dec = ciph.decryptor()
+ def decrypt(self, data):
+ return self.dec.update(data)
+ except ImportError:
+ from Crypto.Cipher import AES
+ class AES_CBC_Decrypt:
+ block_size = 16
+ def __init__(self, key, iv):
+ self.dec = AES.new(key, AES.MODE_CBC, iv)
+ def decrypt(self, data):
+ return self.dec.decrypt(data)
_have_crypto = 1
except ImportError:
_have_crypto = 0
@@ -1691,7 +1708,7 @@ class HeaderDecrypt:
"""File-like object that decrypts from another file"""
def __init__(self, f, key, iv):
self.f = f
- self.ciph = AES.new(key, AES.MODE_CBC, iv)
+ self.ciph = AES_CBC_Decrypt(key, iv)
self.buf = EMPTY
def tell(self):
|
Use cryptography module as preferred AES impl.
|
py
|
diff --git a/openquake/hazardlib/valid.py b/openquake/hazardlib/valid.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/valid.py
+++ b/openquake/hazardlib/valid.py
@@ -415,7 +415,6 @@ def coordinates(value):
[(1.1, 1.2, 0.0), (2.2, 2.3, 0.0)]
>>> coordinates('1.1 1.2 -0.4, 2.2 2.3 -0.5')
[(1.1, 1.2, -0.4), (2.2, 2.3, -0.5)]
-
>>> coordinates('0 0 0, 0 0 -1')
Traceback (most recent call last):
...
|
Remove an empty line [skip CI]
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -159,7 +159,9 @@ else:
n_cpu = 0
ext_modules_settings = cythonize(generate_extensions(ext_modules, line_trace),
- compiler_directives={'embedsignature': True, 'linetrace': line_trace},
+ compiler_directives={"embedsignature": True,
+ "linetrace": line_trace,
+ "language_level": 3},
nthreads=n_cpu)
|
FEATURE: added cython language level settings
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from setuptools import setup, find_packages
from setuptools import Command
import sys, os
-version = '0.1.0b'
+version = '0.1.0b2'
class PyTest(Command):
user_options = []
|
Released version <I>b2
|
py
|
diff --git a/pysat/tests/test_instrument.py b/pysat/tests/test_instrument.py
index <HASH>..<HASH> 100644
--- a/pysat/tests/test_instrument.py
+++ b/pysat/tests/test_instrument.py
@@ -194,14 +194,26 @@ class TestBasics():
assert files[-1] == dt.datetime(2009, 1, 31)
def test_download_updated_files(self, caplog):
- with caplog.at_level(logging.DEBUG, logger='pysat'):
+ with caplog.at_level(logging.INFO, logger='pysat'):
self.testInst.download_updated_files()
+ # Perform a local search
+ assert "files locally" in caplog.text
+ # New files are found
assert "that are new or updated" in caplog.text
+ # download new files
+ assert "Downloading data to" in caplog.text
+ # Update local file list
+ assert "Updating pysat file list" in caplog.text
def test_download_recent_data(self, caplog):
- with caplog.at_level(logging.DEBUG, logger='pysat'):
+ with caplog.at_level(logging.INFO, logger='pysat'):
self.testInst.download()
- assert "most recent data" in caplog.text
+ # Tells user that recent data will be downloaded
+ assert "most recent data by default" in caplog.text
+ # download new files
+ assert "Downloading data to" in caplog.text
+ # Update local file list
+ assert "Updating pysat file list" in caplog.text
# --------------------------------------------------------------------------
#
|
TST: test each part of routine
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -63,7 +63,7 @@ setup(
# ]},
#install_requires = install_requires,
#dependency_links = dependency_links,
- requires = [
+ install_requires = [
'pug-nlp',
'pug-ann',
# 'pug-invest',
|
setuptools uses install_requires NOT require or requires
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,7 @@ setup(
packages=find_packages(exclude='tests'),
license='MIT',
install_requires=[
- 'click==4.1',
+ 'click==5.1',
'gitpython==1.0.1',
'invoke==0.10.1',
'semver==2.2.0',
|
Upgrade dependency click to ==<I>
|
py
|
diff --git a/tests/settings.py b/tests/settings.py
index <HASH>..<HASH> 100644
--- a/tests/settings.py
+++ b/tests/settings.py
@@ -22,6 +22,7 @@ if LIVE_TEST:
'use_mars': USE_MARS,
'bytes_to_unicode': True,
'pooling': True,
+ 'timeout': 30,
}
if 'tds_version' in os.environ:
|
adding timeout of <I> seconds for tests Make sure that tests don't run forever if there is some kind of lock condition.
|
py
|
diff --git a/GEOparse/utils.py b/GEOparse/utils.py
index <HASH>..<HASH> 100644
--- a/GEOparse/utils.py
+++ b/GEOparse/utils.py
@@ -64,7 +64,8 @@ def download_from_url(url, destination_path,
try:
fn = Downloader(
url,
- outdir=os.path.dirname(destination_path))
+ outdir=os.path.dirname(destination_path),
+ filename=os.path.basename(destination_path))
if aspera:
fn.download_aspera(
user="anonftp",
|
fix: Pass filename to downloader
|
py
|
diff --git a/pyaavso/utils.py b/pyaavso/utils.py
index <HASH>..<HASH> 100644
--- a/pyaavso/utils.py
+++ b/pyaavso/utils.py
@@ -31,10 +31,10 @@ def download_observations(observer_code):
'obs_types': 'all',
'page': page_number,
})
- parser = WebObsResultsParser(response.content)
+ parser = WebObsResultsParser(response.text)
observations.extend(parser.get_observations())
# kinda silly, but there's no need for lxml machinery here
- if '>Next</a>' not in response.content:
+ if '>Next</a>' not in response.text:
break
page_number += 1
return observations
|
Use response.text for automatic decoding.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ setup(
author="Steve Engledow",
author_email="[email protected]",
license="Apache2",
- packages=find_packages(),
+ packages=find_packages(exclude=["tests"]),
install_requires=[
"Click",
"PyYAML",
|
Update setup.py When using the servereless framework with troposphere this project is pulled since it's included in the setup.py of troposphere since version <I> and the "tests" file being included causes deployments to fail since tests is a common folder in our microservice projects.
|
py
|
diff --git a/functional/test/test_streams.py b/functional/test/test_streams.py
index <HASH>..<HASH> 100644
--- a/functional/test/test_streams.py
+++ b/functional/test/test_streams.py
@@ -3,6 +3,7 @@ from __future__ import absolute_import
import sqlite3
import unittest
import collections
+import sys
import six
@@ -10,8 +11,6 @@ from functional import seq, pseq
class TestStreams(unittest.TestCase):
-
-
def setUp(self):
self.seq = seq
@@ -270,7 +269,8 @@ class TestStreams(unittest.TestCase):
print('pandas not installed, skipping unit test')
+# Skipping tests on pypy because of https://github.com/uqfoundation/dill/issues/73
[email protected]('__pypy__' in sys.builtin_module_names, 'Skip parallel tests on pypy')
class TestParallelStreams(TestStreams):
-
def setUp(self):
self.seq = pseq
|
Ignore more pypy parallel tests
|
py
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.