diff (stringlengths 139–3.65k) | message (stringlengths 8–627) | diff_languages (stringclasses: 1 value)
---|---|---|
diff --git a/discord/__main__.py b/discord/__main__.py
index <HASH>..<HASH> 100644
--- a/discord/__main__.py
+++ b/discord/__main__.py
@@ -250,8 +250,9 @@ def newcog(parser, args):
name = args.class_name
else:
name = str(directory.stem)
- if '-' in name:
- name = name.replace('-', ' ').title().replace(' ', '')
+ if '-' in name or '_' in name:
+ translation = str.maketrans('-_', '  ')
+ name = name.translate(translation).title().replace(' ', '')
else:
name = name.title()
|
Strip both - and _ from newcog class names
|
py
|
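A standalone sketch of the translate step above, for reference: str.maketrans requires both argument strings to be the same length, so '-' and '_' each map to a single space (the cog name here is hypothetical).

```python
name = "my-cool_cog"
translation = str.maketrans('-_', '  ')  # '-' -> ' ', '_' -> ' '
# Title-case the words, then squeeze the spaces back out.
print(name.translate(translation).title().replace(' ', ''))  # MyCoolCog
```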
diff --git a/discord/client.py b/discord/client.py
index <HASH>..<HASH> 100644
--- a/discord/client.py
+++ b/discord/client.py
@@ -143,11 +143,12 @@ class Client:
intents: :class:`Intents`
The intents that you want to enable for the session. This is a way of
disabling and enabling certain gateway events from triggering and being sent.
+ If not given, defaults to a regularly constructed :class:`Intents` class.
.. versionadded:: 1.5
member_cache_flags: :class:`MemberCacheFlags`
Allows for finer control over how the library caches members.
- If not given, defaults to cache as much as possible is with the
+ If not given, defaults to cache as much as possible with the
currently selected intents.
.. versionadded:: 1.5
|
Some documentation fixes for MemberCacheFlags and Intents
|
py
|
diff --git a/kafka/errors.py b/kafka/errors.py
index <HASH>..<HASH> 100644
--- a/kafka/errors.py
+++ b/kafka/errors.py
@@ -87,7 +87,7 @@ class InvalidMessageError(BrokerResponseError):
class UnknownTopicOrPartitionError(BrokerResponseError):
errno = 3
- message = 'UNKNOWN_TOPIC_OR_PARTITON'
+ message = 'UNKNOWN_TOPIC_OR_PARTITION'
description = ('This request is for a topic or partition that does not'
' exist on this broker.')
invalid_metadata = True
|
tweak spelling mistake (#<I>)
|
py
|
diff --git a/salt/fileserver/gitfs.py b/salt/fileserver/gitfs.py
index <HASH>..<HASH> 100644
--- a/salt/fileserver/gitfs.py
+++ b/salt/fileserver/gitfs.py
@@ -1168,6 +1168,7 @@ def _file_lists(load, form):
'''
Return a dict containing the file lists for files and dirs
'''
+ load = copy.deepcopy(load)
if 'env' in load:
salt.utils.warn_until(
'Boron',
|
prevent gitfs.py changing load variable that is used by other fs modules too
|
py
|
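Why the deepcopy matters, as a runnable sketch (the handler names are hypothetical): every fileserver backend receives the same load dict, so an in-place pop in one backend silently changes what the next backend sees.

```python
import copy

def safe_handler(load):
    load = copy.deepcopy(load)  # work on a private copy
    load.pop('env', None)

def unsafe_handler(load):
    load.pop('env', None)       # mutates the shared dict in place

shared = {'env': 'base', 'path': 'top.sls'}
safe_handler(shared)
print('env' in shared)   # True  -- caller's dict untouched
unsafe_handler(shared)
print('env' in shared)   # False -- other modules now see a changed load
```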
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@ from distutils.core import setup
setup(
name = 'SimpleHTTPSServer',
packages = ['SimpleHTTPSServer'], # this must be the same as the name above
- version = '0.5',
+ version = '0.5.2',
description = 'A simple python http and https webserver',
author = 'John Andersen',
author_email = '[email protected]',
|
Update to <I>. Fixed setup version
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -39,6 +39,13 @@ building of the binary extension module.
""".format(
NO_EXTENSION_ENV
)
+READTHEDOCS_ENV = "READTHEDOCS"
+ON_READTHEDOCS_MESSAGE = """\
+The {} environment variable has been detected, the binary extension module
+will not be built.
+""".format(
+ READTHEDOCS_ENV
+)
INSTALL_PREFIX_ENV = "BEZIER_INSTALL_PREFIX"
NO_INSTALL_PREFIX_MESSAGE = (
"The {} environment variable must be set."
@@ -74,6 +81,10 @@ def numpy_include_dir():
def extension_modules():
+ if os.environ.get(READTHEDOCS_ENV) == "True":
+ print(ON_READTHEDOCS_MESSAGE, file=sys.stderr)
+ return []
+
if NO_EXTENSION_ENV in os.environ:
print(NO_SPEEDUPS_MESSAGE, file=sys.stderr)
return []
|
Explicitly opting out of binary extension on read the docs.
|
py
|
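For context, Read the Docs exports READTHEDOCS with the literal string value "True" in its build environment, which is why the guard compares against that exact string rather than mere presence. A minimal sketch of the pattern:

```python
import os

READTHEDOCS_ENV = "READTHEDOCS"

def extension_modules():
    # Environment variables are strings, so compare against the exact value.
    if os.environ.get(READTHEDOCS_ENV) == "True":
        return []
    return ["(extension modules would be configured here)"]

os.environ[READTHEDOCS_ENV] = "True"
print(extension_modules())  # []
```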
diff --git a/src/pyctools/components/io/dumpmetadata.py b/src/pyctools/components/io/dumpmetadata.py
index <HASH>..<HASH> 100644
--- a/src/pyctools/components/io/dumpmetadata.py
+++ b/src/pyctools/components/io/dumpmetadata.py
@@ -37,5 +37,11 @@ class DumpMetadata(Transformer):
self.last_metadata = in_frame.metadata
print('Frame %04d' % in_frame.frame_no)
print('==========')
- print(in_frame.metadata.get('audit'))
+ indent = 0
+ for line in in_frame.metadata.get('audit').splitlines():
+ print(' ' * indent, line)
+ if '{' in line:
+ indent += 8
+ if '}' in line:
+ indent -= 8
return True
|
Indent audit wrapped in {} characters
|
py
|
diff --git a/pysnmp/smi/mibs/SNMPv2-TC.py b/pysnmp/smi/mibs/SNMPv2-TC.py
index <HASH>..<HASH> 100644
--- a/pysnmp/smi/mibs/SNMPv2-TC.py
+++ b/pysnmp/smi/mibs/SNMPv2-TC.py
@@ -267,7 +267,7 @@ class TextualConvention:
else:
return base.prettyIn(self, value)
- outputValue = octets.ints2octs()
+ outputValue = octets.str2octs('')
runningValue = value
displayHint = self.displayHint
while runningValue and displayHint:
|
fix to TextualConvention initializer
|
py
|
diff --git a/prepro/__init__.py b/prepro/__init__.py
index <HASH>..<HASH> 100644
--- a/prepro/__init__.py
+++ b/prepro/__init__.py
@@ -7,4 +7,3 @@ from id_ import ID
from definestable import DefinesTable
from macrocall import MacroCall
from args import ArgList, Arg
-from output import msg, warning, error
|
Removed unused reference to output package.
|
py
|
diff --git a/spyderlib/config/main.py b/spyderlib/config/main.py
index <HASH>..<HASH> 100644
--- a/spyderlib/config/main.py
+++ b/spyderlib/config/main.py
@@ -744,6 +744,7 @@ DEFAULTS = [
# 3. You don't need to touch this value if you're just adding a new option
CONF_VERSION = '20.0.0'
+
# XXX: Previously we had load=(not DEV) here but DEV was set to *False*.
# Check if it *really* needs to be updated or not
CONF = UserConfig('spyder', defaults=DEFAULTS, load=True, version=CONF_VERSION,
|
remove print shortcut (previous commit started this)
|
py
|
diff --git a/DataPanel.py b/DataPanel.py
index <HASH>..<HASH> 100644
--- a/DataPanel.py
+++ b/DataPanel.py
@@ -517,6 +517,7 @@ class DataPanel(Panel.Panel):
# update the selected image panel
image_panel = self.document_controller.selected_image_panel
if image_panel:
+ # this next statement will eventually end up back in this class via the data_panel_selection_changed_from_image_panel method.
image_panel.data_panel_selection = DataItem.DataItemSpecifier(self.data_item_model_controller.data_group, data_item)
def data_item_widget_key_pressed(index, key):
|
Add comment for clarification. svn r<I>
|
py
|
diff --git a/oct2py/core.py b/oct2py/core.py
index <HASH>..<HASH> 100644
--- a/oct2py/core.py
+++ b/oct2py/core.py
@@ -278,7 +278,14 @@ class Oct2Py(object):
self.logger.debug(resp)
if return_ans:
- resp = [l for l in resp.splitlines() if not l.startswith('ans =')]
+ lines = resp.splitlines()
+ resp = []
+ saw_ans = False
+ for line in reversed(lines):
+ if line.startswith('ans =') and not saw_ans:
+ saw_ans = True
+ continue
+ resp.append(line)
resp = '\n'.join(resp)
if return_both:
|
Only replace the last 'ans =' in the text
|
py
|
diff --git a/convertbng/util.py b/convertbng/util.py
index <HASH>..<HASH> 100644
--- a/convertbng/util.py
+++ b/convertbng/util.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-from ctypes import cdll, c_float, Structure, ARRAY, c_int32
+from ctypes import cdll, c_float, Structure, c_int32
from sys import platform
import os
@@ -10,11 +10,11 @@ else:
ext = "so"
__author__ = u"Stephan Hügel"
-__version__ = "0.1.8"
+__version__ = "0.1.9"
# hacky: http://stackoverflow.com/a/30789980/416626
class Int32_2(Structure):
- _fields_ = [("array", ARRAY(c_int32, 2))]
+ _fields_ = [("array", c_int32 * 2)]
# liblonlat_bng.dylib
file_path = os.path.dirname(__file__)
|
Remove deprecated ARRAY call
|
py
|
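A quick sanity check of the replacement spelling: `c_int32 * 2` constructs the same fixed-size array type that the `ARRAY(c_int32, 2)` helper returned, so the struct layout is unchanged.

```python
from ctypes import Structure, c_int32

class Int32_2(Structure):
    _fields_ = [("array", c_int32 * 2)]  # two contiguous int32 slots

pair = Int32_2((c_int32 * 2)(4, 7))
print(list(pair.array))  # [4, 7]
```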
diff --git a/distutils/tests/test_install.py b/distutils/tests/test_install.py
index <HASH>..<HASH> 100644
--- a/distutils/tests/test_install.py
+++ b/distutils/tests/test_install.py
@@ -229,6 +229,7 @@ class InstallTestCase(
]
assert found == expected
+ @pytest.mark.xfail(reason="#166")
def test_record_extensions(self):
cmd = test_support.missing_compiler_executable()
if cmd is not None:
|
Mark test as xfail for now. Ref pypa/distutils#<I>.
|
py
|
diff --git a/hwt/hdl/architecture.py b/hwt/hdl/architecture.py
index <HASH>..<HASH> 100755
--- a/hwt/hdl/architecture.py
+++ b/hwt/hdl/architecture.py
@@ -6,10 +6,6 @@ class Architecture(object):
"""
def __init__(self, entity):
self.entity = entity
- if entity:
- self.entityName = entity.name
- else:
- self.entityName = None
self.name = "rtl"
self.variables = []
self.processes = []
@@ -17,10 +13,7 @@ class Architecture(object):
self.componentInstances = []
def getEntityName(self):
- if self.entity:
- return self.entity.name
- else:
- return self.entityName
+ return self.entity.name
def __repr__(self):
from hwt.serializer.vhdl.serializer import VhdlSerializer
|
entity required for architecture (was optional)
|
py
|
diff --git a/mixbox/entities.py b/mixbox/entities.py
index <HASH>..<HASH> 100644
--- a/mixbox/entities.py
+++ b/mixbox/entities.py
@@ -264,6 +264,10 @@ class Entity(object):
if ns_info:
ns_info.collect(self)
+ # null behavior for classes that inherit from Entity but do not have _binding_class
+ if not hasattr(self, "_binding_class"):
+ return None
+
entity_obj = self._binding_class()
for field, val in six.iteritems(self._fields):
|
Check for `_binding_class` before serializing
|
py
|
diff --git a/thinc/tests/unit/test_model.py b/thinc/tests/unit/test_model.py
index <HASH>..<HASH> 100644
--- a/thinc/tests/unit/test_model.py
+++ b/thinc/tests/unit/test_model.py
@@ -1,5 +1,10 @@
# encoding: utf8
from __future__ import unicode_literals
+
+import tempfile
+
+import os
+
import pytest
from hypothesis import given, strategies
@@ -176,3 +181,18 @@ def test_all_operators(op):
with pytest.raises(TypeError):
value = m1 | m2
assert base.Model._operators == {}
+
+
+def test_model_can_save_to_disk():
+ temp_file = os.path.join(tempfile.mkdtemp(), 'serialized_thinc_model_test')
+ with base.Model.define_operators({'+': lambda a, b: a}):
+ m = base.Model()
+ m.to_disk(temp_file)
+
+def test_model_can_load_from_disk():
+ temp_file = os.path.join(tempfile.mkdtemp(), 'serialized_thinc_model_test')
+ with base.Model.define_operators({'+': lambda a, b: a}):
+ m = base.Model()
+ m.to_disk(temp_file)
+ m2 = m.from_disk(temp_file)
+ assert m.to_bytes() == m2.to_bytes()
|
Add failing tests to show to_disk exploding with python3
|
py
|
diff --git a/plans/admin.py b/plans/admin.py
index <HASH>..<HASH> 100644
--- a/plans/admin.py
+++ b/plans/admin.py
@@ -49,7 +49,7 @@ def copy_plan(modeladmin, request, queryset):
copy_plan.short_description = _("Make plan copy")
class PlanAdmin(OrderedModelAdmin):
- search_fields = ('customized__username', 'customized__email', )
+ search_fields = ('name', 'customized__username', 'customized__email', )
list_filter = ( 'available', )
list_display = ('name', 'description', 'customized', 'default', 'available', 'created', 'move_up_down_links')
inlines = (PlanPricingInline, PlanQuotaInline)
@@ -104,8 +104,8 @@ class InvoiceAdmin(admin.ModelAdmin):
raw_id_fields = ('user', 'order')
class UserPlanAdmin(UserLinkMixin, admin.ModelAdmin):
- list_filter = ('active', 'expire')
- search_fields = ('user__username', 'user__email')
+ list_filter = ('active', 'expire', 'plan__name')
+ search_fields = ('user__username', 'user__email', 'plan__name')
list_display = ('user', 'plan', 'expire', 'active')
list_select_related = True
readonly_fields = ['user_link', ]
|
Providing more search fields for admin.
|
py
|
diff --git a/stellar_sdk/client/requests_client.py b/stellar_sdk/client/requests_client.py
index <HASH>..<HASH> 100644
--- a/stellar_sdk/client/requests_client.py
+++ b/stellar_sdk/client/requests_client.py
@@ -67,7 +67,7 @@ class RequestsClient(BaseSyncClient):
backoff_factor=self.backoff_factor,
redirect=0,
status_forcelist=self.status_forcelist,
- method_whitelist=frozenset(["GET", "POST"]),
+ allowed_methods=frozenset(["GET", "POST"]),
raise_on_status=False,
)
# init transport adapter
|
refactor: update the parameters used in Retry. (#<I>)
|
py
|
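Background on the rename: urllib3 1.26 deprecated Retry's method_whitelist parameter in favour of allowed_methods, and urllib3 2.0 removed the old name entirely. A minimal construction mirroring the call above (the retry counts and status codes here are illustrative):

```python
from urllib3.util.retry import Retry

retry = Retry(
    total=3,
    backoff_factor=0.5,
    redirect=0,
    status_forcelist=frozenset([429, 500, 502, 503, 504]),
    allowed_methods=frozenset(["GET", "POST"]),  # formerly method_whitelist
    raise_on_status=False,
)
```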
diff --git a/webview/__init__.py b/webview/__init__.py
index <HASH>..<HASH> 100755
--- a/webview/__init__.py
+++ b/webview/__init__.py
@@ -26,6 +26,20 @@ from .localization import localization as original_localization
from .wsgi import Routing, StaticFiles, StaticResources
+__all__ = (
+ # Stuff that's here
+ 'start', 'create_window',
+ # From wsgi
+ 'Routing', 'StaticFiles', 'StaticResources',
+ # From event
+ 'Event',
+ # from util
+ '_token', 'base_uri', 'parse_file_type', 'escape_string', 'make_unicode',
+ 'escape_line_breaks', 'WebViewException',
+ # from window
+ 'Window',
+)
+
logger = logging.getLogger('pywebview')
handler = logging.StreamHandler()
formatter = logging.Formatter('[pywebview] %(message)s')
|
Add __all__ to webview
|
py
|
diff --git a/tests/test_shorteners.py b/tests/test_shorteners.py
index <HASH>..<HASH> 100644
--- a/tests/test_shorteners.py
+++ b/tests/test_shorteners.py
@@ -214,3 +214,4 @@ class ShortenersTest(unittest.TestCase):
def test_none_qrcode(self):
shortener = Shortener('TinyurlShortener')
self.assertIsNone(shortener.qrcode())
+
|
Work around the fact that git doesn't register adding a newline at the end of the file
|
py
|
diff --git a/spyderlib/widgets/fileswitcher.py b/spyderlib/widgets/fileswitcher.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/fileswitcher.py
+++ b/spyderlib/widgets/fileswitcher.py
@@ -541,16 +541,14 @@ class FileSwitcher(QDialog):
results.append((score_value, line, text, rich_text,
fold_level, icons[index], token))
- template_1 = '<code>{0}<big>{1} {2}</big></code>'
- template_2 = '<br><code>{0}</code><i>[Line {1}]</i>'
+ template_1 = '<code>{0}<big>{1}</big></code>'
for (score, line, text, rich_text, fold_level, icon,
token) in sorted(results):
fold_space = ' '*(fold_level)
line_number = line + 1
self.filtered_symbol_lines.append(line_number)
- textline = template_1.format(fold_space, token, rich_text)
- textline += template_2.format(fold_space, line_number)
+ textline = template_1.format(fold_space, rich_text)
item = QListWidgetItem(icon, textline)
item.setSizeHint(QSize(0, 16))
self.list.addItem(item)
|
File switcher: Don't show line number and token for symbols
|
py
|
diff --git a/stanfordnlp/pipeline/pos_processor.py b/stanfordnlp/pipeline/pos_processor.py
index <HASH>..<HASH> 100644
--- a/stanfordnlp/pipeline/pos_processor.py
+++ b/stanfordnlp/pipeline/pos_processor.py
@@ -70,14 +70,4 @@ class POSProcessor:
preds += self.trainer.predict(b)
batch.conll.set(['upos', 'xpos', 'feats'], [y for x in preds for y in x])
- def write_conll(self, batch):
- """ Write current conll contents to file.
- """
- return_string = ""
- for sent in batch.conll.sents:
- for ln in sent:
- return_string += ("\t".join(ln))
- return_string += "\n"
- return_string += "\n"
- return return_string
|
removed copied code from pos_processor
|
py
|
diff --git a/plex/__init__.py b/plex/__init__.py
index <HASH>..<HASH> 100644
--- a/plex/__init__.py
+++ b/plex/__init__.py
@@ -7,5 +7,5 @@ __version__ = '0.6.2'
try:
from plex.client import Plex
-except Exception, ex:
+except Exception as ex:
log.warn('Unable to import submodules - %s', ex)
|
Updated "except" statement for python 3+
|
py
|
diff --git a/django_socketio/example_project/settings.py b/django_socketio/example_project/settings.py
index <HASH>..<HASH> 100644
--- a/django_socketio/example_project/settings.py
+++ b/django_socketio/example_project/settings.py
@@ -38,7 +38,7 @@ MIDDLEWARE_CLASSES = (
STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
-LOGIN_URL = "/admin/"
+#LOGIN_URL = "/admin/"
INSTALLED_APPS = (
'django.contrib.admin',
|
Fix tests for Django <I>
|
py
|
diff --git a/steam/steamid.py b/steam/steamid.py
index <HASH>..<HASH> 100644
--- a/steam/steamid.py
+++ b/steam/steamid.py
@@ -57,6 +57,9 @@ class SteamID(intBase):
def __init__(self, *args, **kwargs):
pass
+ def __str__(self):
+ return str(int(self))
+
def __repr__(self):
return "%s(id=%s, type=%s, universe=%s, instance=%s)" % (
self.__class__.__name__,
|
steamid: define __str__ as py<I> maps it to __repr__
|
py
|
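The gotcha behind this commit, sketched with a simplified stand-in for intBase: in Python 3, int inherits object.__str__, which delegates to __repr__, so overriding only __repr__ also changes what str() and print() show.

```python
class SteamID(int):
    def __repr__(self):
        return "SteamID(id=%s)" % int(self)

s = SteamID(76561197960287930)
print(repr(s))  # SteamID(id=76561197960287930)
print(str(s))   # same repr form -- not the plain number

class Fixed(SteamID):
    def __str__(self):
        return str(int(self))

print(str(Fixed(76561197960287930)))  # 76561197960287930
```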
diff --git a/petrel/setup.py b/petrel/setup.py
index <HASH>..<HASH> 100644
--- a/petrel/setup.py
+++ b/petrel/setup.py
@@ -13,7 +13,7 @@ long_description = open(README).read() + '\n\n'
PACKAGE = "petrel"
-PETREL_VERSION = '0.2'
+PETREL_VERSION = '0.3'
def get_storm_version():
version = subprocess.check_output(['storm', 'version']).strip()
|
Change version number to <I> after addition of is_tick_tuple() function
|
py
|
diff --git a/src/wormhole/cli/public_relay.py b/src/wormhole/cli/public_relay.py
index <HASH>..<HASH> 100644
--- a/src/wormhole/cli/public_relay.py
+++ b/src/wormhole/cli/public_relay.py
@@ -1,5 +1,5 @@
# This is a relay I run on a personal server. If it gets too expensive to
# run, I'll shut it down.
-RENDEZVOUS_RELAY = u"http://wormhole-relay.petmail.org:3000/wormhole-relay/"
-TRANSIT_RELAY = u"tcp:wormhole-transit-relay.petmail.org:3001"
+RENDEZVOUS_RELAY = u"ws://wormhole-relay.petmail.org:4000/"
+TRANSIT_RELAY = u"tcp:wormhole-transit-relay.petmail.org:4001"
|
use new relay URL, for new protocol
|
py
|
diff --git a/pingouin/distribution.py b/pingouin/distribution.py
index <HASH>..<HASH> 100644
--- a/pingouin/distribution.py
+++ b/pingouin/distribution.py
@@ -505,6 +505,10 @@ def epsilon(data, correction='gg'):
n = data.shape[0]
k = data.shape[1]
+ # Epsilon is always 1 with only two repeated measures.
+ if k <= 2:
+ return 1.
+
# Degrees of freedom
if S.columns.nlevels == 1:
dof = k - 1
@@ -655,6 +659,10 @@ def sphericity(data, method='mauchly', alpha=.05):
n = data.shape[0]
k = data.shape[1]
+ # Sphericity is always met with only two repeated measures.
+ if k <= 1:
+ return True, np.nan, np.nan, 1, 1.
+
# Degrees of freedom
if S.columns.nlevels == 1:
d = k - 1
|
Better handling of two measurements in epsilon and sphericity
|
py
|
diff --git a/mt940/models.py b/mt940/models.py
index <HASH>..<HASH> 100644
--- a/mt940/models.py
+++ b/mt940/models.py
@@ -228,7 +228,7 @@ class Transactions(collections.Sequence):
# Combine multiple results together as one string, Rabobank has
# multiple :86: tags for a single transaction
for k, v in _compat.iteritems(result):
- if k in transaction.data:
+ if k in transaction.data and hasattr(v, 'strip'):
transaction.data[k] += '\n%s' % v.strip()
else:
transaction.data[k] = v
|
fixed issue with transaction scoped non-string objects
|
py
|
diff --git a/unbabel/xliff_converter.py b/unbabel/xliff_converter.py
index <HASH>..<HASH> 100644
--- a/unbabel/xliff_converter.py
+++ b/unbabel/xliff_converter.py
@@ -57,7 +57,7 @@ def create_trans_unit(key, value):
'''%(key,value)
def get_dictionary_from_xliff(xliff_text,side="target"):
- soup = BeautifulSoup(xliff_text)
+ soup = BeautifulSoup(xliff_text, "html.parser")
trans_units = soup.find_all("trans-unit")
result_dic = {}
for trans_unit in trans_units:
|
Fixes the BeautifulSoup call. Explicitly defines html.parser as the parser to avoid BeautifulSoup warnings when using this function.
|
py
|
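A short reproduction of the fixed call: without an explicit parser BeautifulSoup guesses the "best" installed one and emits a warning, and the guess can differ between machines, changing parse results.

```python
from bs4 import BeautifulSoup

xliff_text = """<xliff><body>
<trans-unit id="greeting"><target>hello</target></trans-unit>
</body></xliff>"""

soup = BeautifulSoup(xliff_text, "html.parser")  # explicit and warning-free
for unit in soup.find_all("trans-unit"):
    print(unit.get("id"), unit.target.get_text())  # greeting hello
```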
diff --git a/video_encoding/files.py b/video_encoding/files.py
index <HASH>..<HASH> 100644
--- a/video_encoding/files.py
+++ b/video_encoding/files.py
@@ -41,9 +41,31 @@ class VideoFile(File):
"""
if not hasattr(self, '_info_cache'):
encoding_backend = get_backend()
- try:
- path = os.path.abspath(self.path)
- except AttributeError:
- path = os.path.abspath(self.name)
- self._info_cache = encoding_backend.get_media_info(path)
+
+ if hasattr(self, 'file'):
+ # Its an actual file
+ try:
+ path = os.path.abspath(self.path)
+ except AttributeError:
+ path = os.path.abspath(self.name)
+
+ info_cache = encoding_backend.get_media_info(path)
+ else:
+ # Its not an actual file, so assume storage abstraction
+ storage_path = getattr(self, 'path', self.name)
+ if not hasattr(self, 'storage'):
+ raise Exception('VideoFile uses storages yet has no self.storage')
+
+ storage = self.storage
+
+ try:
+ # If its a storage with file system implementation
+ storage_local_path = storage.path(storage_path)
+ except NotImplementedError:
+ storage_local_path = storage.url(storage_path)
+
+ info_cache = encoding_backend.get_media_info(storage_local_path)
+
+ self._info_cache = info_cache
+
return self._info_cache
|
:construction: first attempt at using storages
|
py
|
diff --git a/mpu/aws.py b/mpu/aws.py
index <HASH>..<HASH> 100644
--- a/mpu/aws.py
+++ b/mpu/aws.py
@@ -13,7 +13,7 @@ from tempfile import mkstemp
import boto3
-def list_files(bucket, prefix=None, profile_name=None):
+def list_files(bucket, prefix="", profile_name=None):
"""
List up to 1000 files in a bucket.
|
BUG: Default the prefix of aws.list_files to a string
|
py
|
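A sketch of why the default type matters (the body here is hypothetical; the real mpu helper may differ): botocore validates parameter types before any request is sent, so Prefix=None raises a ParamValidationError, while an empty string simply means "no filter".

```python
import boto3

def list_files(bucket, prefix="", profile_name=None):
    session = boto3.session.Session(profile_name=profile_name)
    s3 = session.client("s3")
    # prefix=None would fail botocore's parameter validation here.
    response = s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
    return [obj["Key"] for obj in response.get("Contents", [])]
```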
diff --git a/smartcard/scard/__init__.py b/smartcard/scard/__init__.py
index <HASH>..<HASH> 100644
--- a/smartcard/scard/__init__.py
+++ b/smartcard/scard/__init__.py
@@ -1,4 +1 @@
-try:
- from scard import *
-except:
- from smartcard.scard._scard import *
+from smartcard.scard.scard import *
|
Make import of smartcard.scard predictable & fix a pylint bare-except warning. Importing smartcard.scard should either succeed in using the generated scard.py wrapper, or fail. Silently importing _scard.so in place of the wrapper risks creating hard-to-diagnose heisenbugs.
|
py
|
diff --git a/annotypes/_anno.py b/annotypes/_anno.py
index <HASH>..<HASH> 100644
--- a/annotypes/_anno.py
+++ b/annotypes/_anno.py
@@ -61,8 +61,8 @@ def make_repr(inst, attrs):
class Anno(object):
- def __init__(self, description, typ=None, name=None):
- # type: (str, type, str) -> None
+ def __init__(self, description, typ=None, name=None, default=NO_DEFAULT):
+ # type: (str, type, str, Any) -> None
"""Annotate a type with run-time accessible metadata
Args:
@@ -71,7 +71,7 @@ class Anno(object):
name: The name of the Anno, can also be set via context manager
"""
self._names_on_enter = None # type: Set[str]
- self.default = NO_DEFAULT # type: Any
+ self.default = default # type: Any
self.typ = typ # type: Union[Tuple[type], type]
self.name = name # type: str
self.is_array = None # type: bool
|
Allow Anno.default to be set in constructor
|
py
|
diff --git a/uncompyle6/parser.py b/uncompyle6/parser.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/parser.py
+++ b/uncompyle6/parser.py
@@ -209,7 +209,7 @@ class PythonParser(GenericASTBuilder):
print("%s%s" % (indent, instructions[i]))
raise ParserError(err_token, err_token.offset, self.debug["reduce"])
else:
- raise ParserError(None, -1)
+ raise ParserError(None, -1, self.debug["reduce"])
def get_pos_kw(self, token):
"""Return then the number of positional parameters and
|
Fix one more call to ParserError
|
py
|
diff --git a/master/buildbot/buildslave.py b/master/buildbot/buildslave.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/buildslave.py
+++ b/master/buildbot/buildslave.py
@@ -450,9 +450,9 @@ class AbstractBuildSlave(pb.Avatar, service.MultiService):
def sendBuilderList(self):
our_builders = self.botmaster.getBuildersForSlave(self.slavename)
blist = [(b.name, b.slavebuilddir) for b in our_builders]
-# if blist == self._old_builder_list:
-# log.msg("Builder list is unchanged; not calling setBuilderList")
-# return defer.succeed(None)
+ if blist == self._old_builder_list:
+ log.msg("Builder list is unchanged; not calling setBuilderList")
+ return defer.succeed(None)
d = self.slave.callRemote("setBuilderList", blist)
def sentBuilderList(ign):
|
re-activate <I>dd<I>aaee3c8b<I>d5a0cb<I>e9fa<I>bbf. This was accidentally commented out in e1a<I>cb<I>ccfe5ac<I>aea<I>fd1e<I>fd2e<I>
|
py
|
diff --git a/twitcher/owsregistry.py b/twitcher/owsregistry.py
index <HASH>..<HASH> 100644
--- a/twitcher/owsregistry.py
+++ b/twitcher/owsregistry.py
@@ -95,4 +95,11 @@ def includeme(config):
def owsregistry(request):
adapter = get_adapter_factory(request)
return adapter.owsregistry_factory(request)
- config.add_request_method(owsregistry, reify=True)
+
+ # In case the adapter employs caching or other per-request/session dependent transaction details, we must ensure
+ # to regenerate the owsregistry object each time since the service-store it provides (amongst other things),
+ # only initializes the request once instead of per-request method calls.
+ # For example, 'request.owsregistry.get_service_by_name' will call 'ServiceStore.fetch_by_name' with
+ # the 'ServiceStore' initialized and stored with the first ever request (if reify=True). All following service
+ # operations would employ the stored database session contained within this request.
+ config.add_request_method(owsregistry, reify=False, property=True)
|
recompute OWSRegistry on demand to avoid caching issues with db-session
|
py
|
diff --git a/core.py b/core.py
index <HASH>..<HASH> 100644
--- a/core.py
+++ b/core.py
@@ -232,6 +232,9 @@ class Orchestrator(Module):
super(Orchestrator, self).__init__(settings, logger)
self.on_playing_changed = Event()
self.lock = threading.Lock()
+ self.playing_media = None
+ self.satisfied_request = None
+ self.player.endTime = None
def get_playing(self):
with self.lock:
return (self.playing_media,
@@ -244,7 +247,8 @@ class Orchestrator(Module):
def run(self):
self.running = True
while self.running:
- with self.lock:
+ self.lock.acquire()
+ try:
if not self.running: break
req = None
try:
@@ -256,10 +260,14 @@ class Orchestrator(Module):
media = self.randomQueue.shift(
).media
except EmptyQueueException:
+ self.lock.release()
self.wait_for_media()
+ self.lock.acquire()
continue
self.playing_media = media
self.satisfied_request = req
+ finally:
+ self.lock.release()
startTime = datetime.datetime.now()
self.on_playing_changed()
self.player.play(media)
|
core: orchestrator: don't keep lock while waiting on media
|
py
|
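The shape of the fix, as a self-contained sketch: a `with lock:` block cannot release the lock mid-body, so the loop switches to explicit acquire/release in order to drop the lock while blocked waiting for media (otherwise a producer could never add any).

```python
import threading, collections, time

lock = threading.Lock()
queue = collections.deque()
media_added = threading.Event()

def consumer():
    lock.acquire()
    try:
        while not queue:
            lock.release()      # not possible from inside 'with lock:'
            media_added.wait()  # block without holding the lock
            lock.acquire()
        print("playing", queue.popleft())
    finally:
        lock.release()

threading.Thread(target=consumer).start()
time.sleep(0.1)
with lock:
    queue.append("song.mp3")
media_added.set()
```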
diff --git a/pylint/checkers/typecheck.py b/pylint/checkers/typecheck.py
index <HASH>..<HASH> 100644
--- a/pylint/checkers/typecheck.py
+++ b/pylint/checkers/typecheck.py
@@ -487,7 +487,7 @@ accessed. Python regular expressions are accepted.'}
if not isinstance(arg, astroid.Starred):
continue
- inferred = next(arg.value.infer())
+ inferred = safe_infer(arg.value)
if isinstance(inferred, astroid.Tuple):
length = len(inferred.elts)
elif isinstance(inferred, astroid.Dict):
|
Use safe_infer when inferring a Starred node's value.
|
py
|
diff --git a/two_factor/urls.py b/two_factor/urls.py
index <HASH>..<HASH> 100644
--- a/two_factor/urls.py
+++ b/two_factor/urls.py
@@ -17,7 +17,7 @@ core = [
name='setup',
),
url(
- regex=r'^account/two_factor/qrcode$',
+ regex=r'^account/two_factor/qrcode/$',
view=QRGeneratorView.as_view(),
name='qr',
),
|
Added missing trailing slash
|
py
|
diff --git a/internals/transitions.py b/internals/transitions.py
index <HASH>..<HASH> 100644
--- a/internals/transitions.py
+++ b/internals/transitions.py
@@ -86,7 +86,7 @@ class dir_concentration_parameter(dp_concentration_parameter):
super(dir_concentration_parameter,self).resample()
else:
if reweights is None:
- reweights = np.ones(rolldata.shape[1])
+ reweights = np.ones(rolldata.shape[1])/rolldata.shape[1]
for itr in range(niter):
# same m sampling code as below, should be reused
# this splits atoms into tables
@@ -94,7 +94,7 @@ class dir_concentration_parameter(dp_concentration_parameter):
for (rowidx,colidx), val in np.ndenumerate(rolldata):
n = 0.
for i in range(val):
- m[rowidx,colidx] += random() < self.concentration / rolldata.shape[1] * reweights[colidx] / (n + self.concentration / rolldata.shape[1] * reweights[colidx])
+ m[rowidx,colidx] += random() < self.concentration * reweights[colidx] / (n + self.concentration * reweights[colidx])
n += 1.
super(dir_concentration_parameter,self).resample(sample_numbers=rolldata.sum(1),total_num_distinct=m.sum(),niter=20)
|
fixed a minor bug where the /L in Dir(alpha/L) was in the wrong place in the concentration parameter resampling code
|
py
|
diff --git a/omego/upgrade.py b/omego/upgrade.py
index <HASH>..<HASH> 100644
--- a/omego/upgrade.py
+++ b/omego/upgrade.py
@@ -316,7 +316,7 @@ class WindowsInstall(Install):
except AttributeError:
with tempfile.NamedTemporaryFile(dir=targetdir) as test:
return os.path.exists(
- os.path.join(link), os.path.basename(test.name))
+ os.path.join(link, os.path.basename(test.name)))
# Symlinks are a bit more complicated on Windows:
# - You must have (elevated) administrator privileges
|
Fix typo in constructing temp file name
|
py
|
diff --git a/ddmrp/models/stock_buffer.py b/ddmrp/models/stock_buffer.py
index <HASH>..<HASH> 100644
--- a/ddmrp/models/stock_buffer.py
+++ b/ddmrp/models/stock_buffer.py
@@ -1097,6 +1097,7 @@ class StockBuffer(models.Model):
string="Replenishment Location",
comodel_name="stock.location",
readonly=True,
+ index=True,
help="Source location from where goods will be replenished. "
"Computed when buffer is refreshed from following the Stock Rules.",
)
|
ddmrp: add index on 'distributed_source_location_id'. The filter on 'distributed_source_location_qty' uses a search method that queries the 'distributed_source_location_id' field, which has no index.
|
py
|
diff --git a/proliantutils/tests/ilo/test_ris.py b/proliantutils/tests/ilo/test_ris.py
index <HASH>..<HASH> 100755
--- a/proliantutils/tests/ilo/test_ris.py
+++ b/proliantutils/tests/ilo/test_ris.py
@@ -800,7 +800,7 @@ class IloRisTestCase(testtools.TestCase):
_uefi_boot_mode_mock.return_value = False
ret = self.client.get_persistent_boot_device()
get_host_details_mock.assert_called_once_with()
- self.assertEqual(ret, None)
+ self.assertIsNone(ret)
@mock.patch.object(ris.RISOperations, '_get_persistent_boot_devices')
@mock.patch.object(ris.RISOperations, '_is_boot_mode_uefi')
|
Using assertIsNone() instead of assertEqual(None). Instead of using assertEqual(None, ***), developers should use assertIsNone(***). Change-Id: If<I>da3acc<I>fd5e<I>b7d<I>e4f9aebeab<I>e7
|
py
|
diff --git a/dju_common/__init__.py b/dju_common/__init__.py
index <HASH>..<HASH> 100644
--- a/dju_common/__init__.py
+++ b/dju_common/__init__.py
@@ -1 +1 @@
-__version__ = '0.3.1'
+__version__ = '0.3.2'
|
Update makemessages command. Release <I>
|
py
|
diff --git a/wright/stage/c.py b/wright/stage/c.py
index <HASH>..<HASH> 100644
--- a/wright/stage/c.py
+++ b/wright/stage/c.py
@@ -62,7 +62,7 @@ int main() {
def __call__(self, name, headers=()):
source = self.source % (name,)
for header in headers:
- source = '#include <{}>\n'.format(header)
+ source = '#include <{}>\n'.format(header) + source
with TempFile('define', '.c', content=source) as temp:
return super(CheckDefine, self).__call__(temp.filename, run=True)
|
c: pass headers in CheckDefine
|
py
|
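What the change fixes, spelled out: the old line assigned instead of prepending, so each iteration threw away the template body and any previously added header. A standalone illustration:

```python
source = "int main() { return (int)FOO; }\n"
for header in ("stdio.h", "stdlib.h"):
    source = '#include <{}>\n'.format(header) + source  # prepend, keep the body
print(source)
# #include <stdlib.h>
# #include <stdio.h>
# int main() { return (int)FOO; }
```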
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
from os.path import exists
from setuptools import setup
-import debas
+
setup(name='Satyr',
version='0.1',
@@ -16,6 +16,7 @@ setup(name='Satyr',
long_description=(open('README.rst').read() if exists('README.rst')
else ''),
install_requires=[],
+ extras_require={'mesos': ['mesos.native']},
setup_requires=['pytest-runner'],
tests_require=['pytest'],
zip_safe=False)
|
added mesos.native to extras
|
py
|
diff --git a/sigal/__init__.py b/sigal/__init__.py
index <HASH>..<HASH> 100644
--- a/sigal/__init__.py
+++ b/sigal/__init__.py
@@ -59,7 +59,7 @@ def init_logging(level=logging.INFO):
logger.setLevel(level)
if level == logging.DEBUG:
- formatter = Formatter('%(asctime)s - %(levelname)s - %(message)s')
+ formatter = Formatter('%(levelname)s - %(message)s')
else:
formatter = Formatter('%(message)s')
|
Logging - don't show the time.
|
py
|
diff --git a/centinel/primitives/tcpdump.py b/centinel/primitives/tcpdump.py
index <HASH>..<HASH> 100644
--- a/centinel/primitives/tcpdump.py
+++ b/centinel/primitives/tcpdump.py
@@ -31,7 +31,7 @@ class Tcpdump():
# use the centinel configured tcpdump options if available
# (if not specified by the user, this will be -i any, so
# the same as below
- if hasattr(centinel.conf['experiments'], 'tcpdump_params'):
+ if 'tcpdump_params' in centinel.conf['experiments']:
pcap_args = centinel.conf['experiments']['tcpdump_params']
# for backwards compatability, ensure that we give some
# pcap args for what to capture
|
fixed tcpdump bug where we were doing the wrong check for a config param
|
py
|
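The bug in one line: hasattr() looks for attributes on the dict object itself, never its keys, so the old guard was always False and any user-configured tcpdump_params were silently ignored.

```python
conf = {'experiments': {'tcpdump_params': ['-i', 'any']}}

print(hasattr(conf['experiments'], 'tcpdump_params'))  # False: not an attribute
print('tcpdump_params' in conf['experiments'])          # True: key membership
```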
diff --git a/spyder/widgets/variableexplorer/utils.py b/spyder/widgets/variableexplorer/utils.py
index <HASH>..<HASH> 100644
--- a/spyder/widgets/variableexplorer/utils.py
+++ b/spyder/widgets/variableexplorer/utils.py
@@ -82,9 +82,9 @@ def get_numpy_dtype(obj):
# Pandas support
#==============================================================================
if programs.is_module_installed('pandas', PANDAS_REQVER):
- from pandas import DataFrame, Series
+ from pandas import DataFrame, DatetimeIndex, Series
else:
- DataFrame = Series = FakeObject # analysis:ignore
+ DataFrame = DatetimeIndex = Series = FakeObject # analysis:ignore
#==============================================================================
@@ -201,7 +201,8 @@ COLORS = {
MaskedArray,
matrix,
DataFrame,
- Series): ARRAY_COLOR,
+ Series,
+ DatetimeIndex): ARRAY_COLOR,
Image: "#008000",
datetime.date: "#808000",
}
|
Variable explorer: Add Panda's DatetimeIndex as supported type
|
py
|
diff --git a/gimmemotifs/config.py b/gimmemotifs/config.py
index <HASH>..<HASH> 100644
--- a/gimmemotifs/config.py
+++ b/gimmemotifs/config.py
@@ -57,7 +57,10 @@ class MotifConfig:
self.config.set("params", k, v)
def get_default_params(self):
- return dict(self.config.items("params"))
+ d = dict(self.config.items("params"))
+ for k in ["use_strand", "use_cache"]:
+ d[k] = self.config.getboolean("params", k)
+ return d
def get_seqlogo(self):
try:
|
fixed issue with boolean in config file
|
py
|
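Why getboolean is needed: ConfigParser stores every value as a string, and the non-empty string 'False' is truthy. A small reproduction, written against Python 3's configparser:

```python
from configparser import ConfigParser

config = ConfigParser()
config.read_string("[params]\nuse_strand = False\nuse_cache = True\n")

d = dict(config.items("params"))
print(bool(d["use_strand"]))                     # True  -- truthy string 'False'
print(config.getboolean("params", "use_strand")) # False -- parsed as a boolean
```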
diff --git a/tools/vcrpy/setup.py b/tools/vcrpy/setup.py
index <HASH>..<HASH> 100644
--- a/tools/vcrpy/setup.py
+++ b/tools/vcrpy/setup.py
@@ -24,7 +24,7 @@ class PyTest(TestCommand):
install_requires = [
"PyYAML",
- "wrapt",
+ "wrapt<=1.12.1",
"six>=1.5",
'contextlib2; python_version=="2.7"',
'mock; python_version=="2.7"',
|
Pin wrapt version to pass on Windows with Python <I>. (#<I>)
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -32,7 +32,8 @@ setup(
]
},
install_requires=[
- 'tornado',
+ 'tornado;python_version>"2.7"',
+ 'tornado==5.1.1;python_version=="2.7"',
'six',
],
license='BSD',
|
install older version of tornado for <I>
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,17 +24,18 @@ setup(
packages=find_packages(),
install_requires=['Django>=1.8.2', 'cryptography>=0.9'],
classifiers=[
- 'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.6',
+ 'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
- 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: Implementation :: CPython',
+ 'Programming Language :: Python :: Implementation :: PyPy',
'Framework :: Django',
],
zip_safe=False,
|
Update Trove classifiers in setup.py.
|
py
|
diff --git a/openquake/hazardlib/tests/gsim/check_gsim.py b/openquake/hazardlib/tests/gsim/check_gsim.py
index <HASH>..<HASH> 100755
--- a/openquake/hazardlib/tests/gsim/check_gsim.py
+++ b/openquake/hazardlib/tests/gsim/check_gsim.py
@@ -103,11 +103,6 @@ def check_gsim(gsim_cls, datafile, max_discrep_percentage, debug=False):
discrep_percentage = numpy.abs(
result / expected_result * 100 - 100)
discrepancies.extend(discrep_percentage)
- print(imt, rctx.mag)
- comps = numpy.column_stack([dctx.rrup, sctx.vs30, result,
- expected_result, discrep_percentage])
- for row in comps:
- print("%.1f %.1f %.6e %.6e %.4f" % tuple(row))
errors += (discrep_percentage > max_discrep_percentage).sum()
if errors and debug:
|
Removes dropped-in print statements from check_gsim. Former-commit-id: <I>b<I>e4fcfb6f<I>a<I>ac0b8e8be2f<I>c
|
py
|
diff --git a/holoviews/core/options.py b/holoviews/core/options.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/options.py
+++ b/holoviews/core/options.py
@@ -126,6 +126,18 @@ class abbreviated_exception(object):
raise AbbreviatedException(etype, value, traceback)
+@contextmanager
+def options_policy(skip_invalid, warn_on_skip):
+ """
+ Context manager to temporarily set the skip_invalid and warn_on_skip
+ class parameters on Options.
+ """
+ settings = (Options.skip_invalid, Options.warn_on_skip)
+ (Options.skip_invalid, Options.warn_on_skip) = (skip_invalid, warn_on_skip)
+ yield
+ (Options.skip_invalid, Options.warn_on_skip) = settings
+
+
class Keywords(param.Parameterized):
"""
A keywords objects represents a set of Python keywords. It is
|
Added the options_policy context manager
|
py
|
diff --git a/telluric/util/local_tile_server.py b/telluric/util/local_tile_server.py
index <HASH>..<HASH> 100644
--- a/telluric/util/local_tile_server.py
+++ b/telluric/util/local_tile_server.py
@@ -44,7 +44,7 @@ class TileServerHandler(tornado.web.RequestHandler):
if isinstance(obj.obj, tl.GeoFeature) and obj.obj.has_raster:
tile = yield self._get_raster_png_tile(obj.obj.raster(), x, y, z)
elif isinstance(obj.obj, BaseCollection):
- tile = yield self._get_collection_png_tile(obj.obj, x, y, z)
+ tile = yield self._get_collection_png_tile(obj.obj(), x, y, z)
if tile:
self.set_header("Content-type", "image/png")
|
raster is a method of GeoFeature
|
py
|
diff --git a/master/buildbot/changes/gerritchangesource.py b/master/buildbot/changes/gerritchangesource.py
index <HASH>..<HASH> 100644
--- a/master/buildbot/changes/gerritchangesource.py
+++ b/master/buildbot/changes/gerritchangesource.py
@@ -102,12 +102,12 @@ class GerritChangeSource(base.ChangeSource):
return defer.succeed(None)
# flatten the event dictionary, for easy access with WithProperties
- def flatten(event, base, d):
- for k, v in d.items():
+ def flatten(properties, base, event):
+ for k, v in event.items():
if type(v) == dict:
- flatten(event, base + "." + k, v)
+ flatten(properties, base + "." + k, v)
else: # already there
- event[base + "." + k] = v
+ properties[base + "." + k] = v
properties = {}
flatten(properties, "event", event)
|
don't use 'd' for something that's not a dictionary
|
py
|
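The renamed helper, runnable in isolation: it recursively copies nested event fields into a flat properties dict under dotted keys, the form WithProperties interpolation expects.

```python
def flatten(properties, base, event):
    for k, v in event.items():
        if type(v) == dict:
            flatten(properties, base + "." + k, v)
        else:
            properties[base + "." + k] = v

properties = {}
flatten(properties, "event", {"change": {"owner": "alice"}, "type": "patchset-created"})
print(properties)
# {'event.change.owner': 'alice', 'event.type': 'patchset-created'}
```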
diff --git a/nodeconductor/iaas/backend/openstack.py b/nodeconductor/iaas/backend/openstack.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/iaas/backend/openstack.py
+++ b/nodeconductor/iaas/backend/openstack.py
@@ -277,9 +277,11 @@ class OpenStackBackend(object):
nova_quotas[nova_quota_mapping[quota_name][0]] = quota_backend_mapping(quota_value)
continue
+ if len(cinder_quotas) == 0 and len(nova_quotas) == 0:
+ return
+
try:
session = self.create_tenant_session(membership)
-
try:
if len(cinder_quotas) > 0:
cinder = self.create_cinder_client(session)
|
Exit early if no quota updates are required - NC-<I>
|
py
|
diff --git a/airflow/providers/exasol/hooks/exasol.py b/airflow/providers/exasol/hooks/exasol.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/exasol/hooks/exasol.py
+++ b/airflow/providers/exasol/hooks/exasol.py
@@ -20,7 +20,6 @@ from contextlib import closing
from typing import Union, Optional, List, Tuple, Any
import pyexasol
-from past.builtins import basestring
from pyexasol import ExaConnection
from airflow.hooks.dbapi_hook import DbApiHook
@@ -122,7 +121,7 @@ class ExasolHook(DbApiHook):
:param parameters: The parameters to render the SQL query with.
:type parameters: dict or iterable
"""
- if isinstance(sql, basestring):
+ if isinstance(sql, str):
sql = [sql]
with closing(self.get_conn()) as conn:
|
Replaced basestring with str in the Exasol hook (#<I>)
|
py
|
diff --git a/cqlengine/query.py b/cqlengine/query.py
index <HASH>..<HASH> 100644
--- a/cqlengine/query.py
+++ b/cqlengine/query.py
@@ -1,6 +1,9 @@
import copy
from datetime import datetime
from uuid import uuid4
+from hashlib import md5
+from time import time
+from uuid import uuid1
from cqlengine import BaseContainerColumn, BaseValueManager, Map, columns
from cqlengine.connection import connection_manager, execute, RowResult
@@ -545,8 +548,8 @@ class AbstractQuerySet(object):
qs = ' '.join(qs)
- cur = execute(qs, self._where_values())
- return cur.fetchone()[0]
+ result = execute(qs, self._where_values(), row_factory=decoder.tuple_factory)
+ return result[0][0]
else:
return len(self._result_cache)
|
fixing count method. Conflicts: cqlengine/query.py
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,6 +15,7 @@ from setuptools.command import easy_install
install_requires = [
'click>=6.7',
'pip>=9.0.3',
+ 'setuptools',
]
extras_require = {
|
Add setuptools as a dependency. If we try to `pip install` something that doesn't have a wheel uploaded, pip will try to `setup.py install` it, which fails on systems that don't already have setuptools installed.
|
py
|
diff --git a/polyaxon/sso/wizard.py b/polyaxon/sso/wizard.py
index <HASH>..<HASH> 100644
--- a/polyaxon/sso/wizard.py
+++ b/polyaxon/sso/wizard.py
@@ -24,7 +24,7 @@ class IdentityWizard(Wizard):
manager = providers.default_manager
def redirect_url(self, request):
- associate_url = reverse('sso:create_identity', args=['github'])
+ associate_url = reverse('sso:create_identity', args=[self.provider.key])
# Use configured redirect_url if specified for the pipeline if available
associate_url = self.config.get('redirect_url', associate_url)
@@ -63,7 +63,8 @@ class IdentityWizard(Wizard):
username=identity['username'],
first_name=identity['first_name'],
last_name=identity['last_name'],
- password='github.{}'.format(uuid.uuid4().hex) # Generate a random password
+ password='{}.{}'.format(
+ self.provider.key, uuid.uuid4().hex) # Generate a random password
)
def finish_wizard(self):
|
Generate password based on provider's name
|
py
|
diff --git a/scripts/pricefeeds/config-example.py b/scripts/pricefeeds/config-example.py
index <HASH>..<HASH> 100644
--- a/scripts/pricefeeds/config-example.py
+++ b/scripts/pricefeeds/config-example.py
@@ -137,7 +137,7 @@ feedSources["huobi"] = feedsources.Huobi(allowFailure=True)
# Default: "latest" # Will fetch prices from exchanges and publish it
################################################################################
blame = "latest"
-blame = "1428190"
+#blame = "1428190"
################################################################################
## Git revision for storage in blame files
|
[Price Feed] Committee Recommended settings 2
|
py
|
diff --git a/src/_pytest/terminal.py b/src/_pytest/terminal.py
index <HASH>..<HASH> 100644
--- a/src/_pytest/terminal.py
+++ b/src/_pytest/terminal.py
@@ -849,11 +849,8 @@ class TerminalReporter(object):
msg = self._getfailureheadline(rep)
if rep.when == "collect":
msg = "ERROR collecting " + msg
- elif rep.when == "setup":
- msg = "ERROR at setup of " + msg
else:
- assert rep.when == "teardown", "Unexpected rep: %r" % (rep,)
- msg = "ERROR at teardown of " + msg
+ msg = "ERROR at %s of %s" % (rep.when, msg)
self.write_sep("_", msg, red=True, bold=True)
self._outrep_summary(rep)
|
fixup! terminal: summary_errors: replace if with assert
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -107,7 +107,7 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
-html_theme = 'alabaster'
+html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
|
Change readthedocs theme to default
|
py
|
diff --git a/test/tabletmanager.py b/test/tabletmanager.py
index <HASH>..<HASH> 100755
--- a/test/tabletmanager.py
+++ b/test/tabletmanager.py
@@ -257,9 +257,9 @@ def run_test_vtctl_clone():
tablet_62344.populate('vt_snapshot_test', create_vt_insert_test,
populate_vt_insert_test)
-
tablet_62344.start_vttablet()
+ tablet_62044.create_db('vt_snapshot_test')
tablet_62044.init_tablet('idle', start=True)
# small test to make sure the directory validation works
|
Fixing a missing database creation command. LGTM Ric.
|
py
|
diff --git a/hatemile/util/beautifulsoup/beautifulsouphtmldomelement.py b/hatemile/util/beautifulsoup/beautifulsouphtmldomelement.py
index <HASH>..<HASH> 100644
--- a/hatemile/util/beautifulsoup/beautifulsouphtmldomelement.py
+++ b/hatemile/util/beautifulsoup/beautifulsouphtmldomelement.py
@@ -38,10 +38,10 @@ class BeautifulSoupHTMLDOMElement(HTMLDOMElement):
def getAttribute(self, name):
if not self.hasAttribute(name):
return None
- if type(self.data[name]) == type([]):
- array = self.data[name]
+ if isinstance(self.data[name], list):
+ values = self.data[name]
value = ''
- for item in array:
+ for item in values:
value += item + ' '
return value.strip()
else:
@@ -88,11 +88,11 @@ class BeautifulSoupHTMLDOMElement(HTMLDOMElement):
return element
def getChildren(self):
- array = []
+ children = []
for child in self.data.children:
if isinstance(child, PageElement):
- array.append(BeautifulSoupHTMLDOMElement(child))
- return array
+ children.append(BeautifulSoupHTMLDOMElement(child))
+ return children
def appendText(self, text):
self.data.append(text)
|
:recycle: Compare types using isinstance function
|
py
|
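The practical difference behind the isinstance change: exact type comparison rejects subclasses, isinstance accepts them. A tiny demonstration with a hypothetical list subclass:

```python
class AttributeValues(list):
    pass

values = AttributeValues(["btn", "btn-primary"])
print(type(values) == type([]))  # False: exact-type check misses the subclass
print(isinstance(values, list))  # True
```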
diff --git a/tabular_predDB/timing_analysis/runtime_scripting.py b/tabular_predDB/timing_analysis/runtime_scripting.py
index <HASH>..<HASH> 100644
--- a/tabular_predDB/timing_analysis/runtime_scripting.py
+++ b/tabular_predDB/timing_analysis/runtime_scripting.py
@@ -142,10 +142,7 @@ if __name__ == '__main__':
input_filename=input_filename,
table_data_filename=table_data_filename,
)
- was_successful = HE.send_hadoop_command(hadoop_engine,
- table_data_filename,
- input_filename,
- output_path, n_tasks=n_tasks)
+ was_successful = hadoop_engine.send_hadoop_command(n_tasks)
if was_successful:
hu.copy_hadoop_output(output_path, output_filename)
else:
|
modify for new HadoopEngine.send_hadoop_command format
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -58,6 +58,7 @@ setup(
install_requires=[
'Click>=3.3',
'rfc3987>=1.3.4',
+ 'PyYAML>=3.11'
],
entry_points='''
[console_scripts]
|
Add PyYAML dependency (#<I>)
|
py
|
diff --git a/lib/gruvi/ssl.py b/lib/gruvi/ssl.py
index <HASH>..<HASH> 100644
--- a/lib/gruvi/ssl.py
+++ b/lib/gruvi/ssl.py
@@ -55,6 +55,7 @@ class SSLSocket(gruvi.Socket):
self.ssl_version = ssl_version
self.ca_certs = ca_certs
self.do_handshake_on_connect = do_handshake_on_connect
+ self.ciphers = ciphers
self._wrapped = sock
try:
name = sock.getpeername()
@@ -77,7 +78,8 @@ class SSLSocket(gruvi.Socket):
else:
self._sslobj = _ssl.sslwrap(self._sock, server_side, keyfile,
certfile, cert_reqs, ssl_version,
- ca_certs, ciphers)
+ ca_certs)
+ # XXX set ciphers
if do_handshake_on_connect:
self.do_handshake()
@@ -117,7 +119,7 @@ class SSLSocket(gruvi.Socket):
super(SSLSocket, self).connect(address)
self._sslobj = _ssl.sslwrap(self._sock, server_side, keyfile,
certfile, cert_reqs, ssl_version,
- ca_certs, ciphers)
+ ca_certs)
if self.do_handshake_on_connect:
self.do_handshake()
|
Fixed for Python <I>. Python <I> doesn't allow setting the ciphers for an SSLSocket. Remove setting ciphers for now.
|
py
|
diff --git a/stored_messages/settings.py b/stored_messages/settings.py
index <HASH>..<HASH> 100644
--- a/stored_messages/settings.py
+++ b/stored_messages/settings.py
@@ -21,6 +21,7 @@ from django.utils import six, importlib
from .constants import *
USER_SETTINGS = getattr(settings, 'STORED_MESSAGES', None)
+MESSAGE_TAGS = getattr(settings, 'MESSAGE_TAGS', None)
DEFAULTS = {
'STORE_LEVELS': (
@@ -30,6 +31,13 @@ DEFAULTS = {
STORED_WARNING,
STORED_ERROR,
),
+ 'MESSAGE_TAGS': MESSAGE_TAGS.update({
+ STORED_DEBUG: 'stored debug',
+ STORED_INFO: 'stored info',
+ STORED_SUCCESS: 'stored success',
+ STORED_WARNING: 'stored warning',
+ STORED_ERROR: 'stored error',
+ }),
'INBOX_EXPIRE_DAYS': 30,
}
|
added default for MESSAGE_TAGS settings
|
py
|
diff --git a/salt/state.py b/salt/state.py
index <HASH>..<HASH> 100644
--- a/salt/state.py
+++ b/salt/state.py
@@ -2397,9 +2397,13 @@ class BaseHighState(object):
self.merge_included_states(highstate, state, errors)
for i, error in enumerate(errors[:]):
if 'is not available on the salt master' in error:
- errors[i] = (
- 'No matching sls found for {0!r} '
- 'in env {1!r}'.format(sls_match, saltenv))
+ # match SLS foobar in environment
+ this_sls = 'SLS {0} in environment'.format(
+ sls_match)
+ if this_sls in error:
+ errors[i] = (
+ 'No matching sls found for {0!r} '
+ 'in env {1!r}'.format(sls_match, saltenv))
all_errors.extend(errors)
self.clean_duplicate_extends(highstate)
|
salt.state: fix an edge case of inclusion for error reporting
|
py
|
diff --git a/pyschema_extensions/avro_schema_parser.py b/pyschema_extensions/avro_schema_parser.py
index <HASH>..<HASH> 100644
--- a/pyschema_extensions/avro_schema_parser.py
+++ b/pyschema_extensions/avro_schema_parser.py
@@ -96,6 +96,7 @@ class AvroSchemaParser(object):
field_dct["__doc__"] = schema_struct["doc"]
schema = pyschema.core.PySchema(record_name.encode("ascii"), (pyschema.core.Record,), field_dct)
+ self.schema_store.add_record(schema)
return schema
def _get_field_builder(self, type_def_struct, enclosing_namespace):
@@ -143,7 +144,6 @@ class AvroSchemaParser(object):
" declaration: {0!r}"
).format(type_def_struct))
schema_class = self.parse_schema_struct(type_def_struct, enclosing_namespace)
- self.schema_store.add_record(schema_class)
else:
if not isinstance(type_def_struct, basestring):
raise AVSCParseException((
|
Adding all parsed schemas to the store. This is needed when parsing multiple schemas that reference each other.
|
py
|
diff --git a/codecs/proxy.py b/codecs/proxy.py
index <HASH>..<HASH> 100644
--- a/codecs/proxy.py
+++ b/codecs/proxy.py
@@ -8,7 +8,8 @@ from os import path
CODECS_DIR = path.expandvars("$sarkCodecs")
# Load the codecs based on the filename of the proxy
-codec_filename = path.basename(__file__)
+name = __name__.split(".")[-1]
+codec_filename = name + ".py"
codec_path = path.join(CODECS_DIR, codec_filename)
codec = imp.load_source(__name__, codec_path)
|
BUGFIX: fixed a bug in the proxy codec that causes it to fail if .pyc files exist.
|
py
|
diff --git a/indra/sources/cwms/api.py b/indra/sources/cwms/api.py
index <HASH>..<HASH> 100644
--- a/indra/sources/cwms/api.py
+++ b/indra/sources/cwms/api.py
@@ -78,7 +78,8 @@ def process_ekb_file(fname, extract_filter=None,
# Process EKB XML file into statements
with open(fname, 'rb') as fh:
ekb_str = fh.read().decode('utf-8')
- return process_ekb(ekb_str, extract_filter=extract_filter)
+ return process_ekb(ekb_str, extract_filter=extract_filter,
+ grounding_mode=grounding_mode)
def process_ekb(ekb_str, extract_filter=None,
|
Propagate grounding mode to EKB processing
|
py
|
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/http.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/http.py
index <HASH>..<HASH> 100644
--- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/http.py
+++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/validate/http.py
@@ -22,6 +22,8 @@ REQUEST_LIBRARY_FUNCTIONS = {
'requests.options',
}
+TEMPLATES = ['http', 'openmetrics', 'openmetrics_legacy']
+
def validate_config_http(file, check):
"""Determines if integration with http wrapper class
@@ -39,9 +41,10 @@ def validate_config_http(file, check):
has_failed = False
with open(file, 'r', encoding='utf-8') as f:
for _, line in enumerate(f):
- if 'instances/http' in line or 'instances/openmetrics_legacy' in line:
+ if any('instances/{}'.format(temp) in line for temp in TEMPLATES):
has_instance_http = True
- if 'init_config/http' in line or 'init_config/openmetrics_legacy' in line:
+
+ if any('init_config/{}'.format(temp) in line for temp in TEMPLATES):
has_init_config_http = True
if not has_instance_http:
|
Include new and legacy openmetrics template in http validation (#<I>) * Include new and legacy wrapper * Fix style
|
py
|
diff --git a/phono3py/cui/phono3py_script.py b/phono3py/cui/phono3py_script.py
index <HASH>..<HASH> 100644
--- a/phono3py/cui/phono3py_script.py
+++ b/phono3py/cui/phono3py_script.py
@@ -816,14 +816,17 @@ def init_phph_interaction(phono3py,
nac_q_direction=settings.nac_q_direction,
constant_averaged_interaction=ave_pp,
frequency_scale_factor=updated_settings['frequency_scale_factor'])
+
if not settings.read_phonon:
+ if log_level:
+ print("-" * 27 + " Phonon calculations " + "-" * 28)
+ dm = phono3py.dynamical_matrix
+ if (dm.is_nac() and dm.nac_method == 'gonze'):
+ dm.show_Gonze_nac_message()
+ print("Running harmonic phonon calculations...")
+ sys.stdout.flush()
phono3py.run_phonon_solver()
- if log_level > 0:
- dm = phono3py.dynamical_matrix
- if (dm.is_nac() and dm.nac_method == 'gonze'):
- dm.show_Gonze_nac_message()
-
if settings.write_phonon:
freqs, eigvecs, grid_address = phono3py.get_phonon_data()
filename = write_phonon_to_hdf5(
|
Slightly moved the location where NAC info is shown.
|
py
|
diff --git a/apitools/base/protorpclite/protojson_test.py b/apitools/base/protorpclite/protojson_test.py
index <HASH>..<HASH> 100644
--- a/apitools/base/protorpclite/protojson_test.py
+++ b/apitools/base/protorpclite/protojson_test.py
@@ -440,7 +440,7 @@ class ProtojsonTest(test_util.TestCase,
"""Test decoding improperly encoded base64 bytes value."""
self.assertRaisesWithRegexpMatch(
messages.DecodeError,
- 'Base64 decoding error: Incorrect padding',
+ 'Base64 decoding error',
protojson.decode_message,
test_util.OptionalMessage,
'{"bytes_value": "abcdefghijklmnopq"}')
|
Fix a Python <I> test failure in protojson_test.py. (#<I>) This fixes a test failure caused by an error message changing from "Base64 decoding error: Incorrect padding" to "Base64 decoding error: Invalid base64-encoded string: number of data characters (<I>) cannot be 1 more than a multiple of 4".
|
py
|
diff --git a/instabot/bot/bot_direct.py b/instabot/bot/bot_direct.py
index <HASH>..<HASH> 100644
--- a/instabot/bot/bot_direct.py
+++ b/instabot/bot/bot_direct.py
@@ -21,7 +21,7 @@ def send_message(self, text, user_ids, thread_id=None):
delay.message_delay(self)
urls = self.extract_urls(text)
- item_type = 'link' if urls else 'message'
+ item_type = 'links' if urls else 'message'
if self.api.send_direct_item(
item_type,
user_ids,
|
Fix "link" to "links"
|
py
|
diff --git a/regions/core/mask.py b/regions/core/mask.py
index <HASH>..<HASH> 100644
--- a/regions/core/mask.py
+++ b/regions/core/mask.py
@@ -26,6 +26,8 @@ class Mask(object):
"""
def __init__(self, mask, bbox):
+ if mask.shape != bbox.shape:
+ raise ValueError("shape of mask and bounding box should match")
self.data = np.asanyarray(mask)
self.bbox = bbox
|
Make sure that bounding box shape matches mask shape
|
py
|
diff --git a/ampy/cli.py b/ampy/cli.py
index <HASH>..<HASH> 100644
--- a/ampy/cli.py
+++ b/ampy/cli.py
@@ -246,14 +246,15 @@ def put(local, remote):
try:
# Create remote parent directory.
board_files.mkdir(remote_parent)
- # Loop through all the files and put them on the board too.
- for filename in child_files:
- with open(os.path.join(parent, filename), "rb") as infile:
- remote_filename = posixpath.join(remote_parent, filename)
- board_files.put(remote_filename, infile.read())
except files.DirectoryExistsError:
# Ignore errors for directories that already exist.
pass
+ # Loop through all the files and put them on the board too.
+ for filename in child_files:
+ with open(os.path.join(parent, filename), "rb") as infile:
+ remote_filename = posixpath.join(remote_parent, filename)
+ board_files.put(remote_filename, infile.read())
+
else:
# File copy, open the file and copy its contents to the board.
|
The put method now updates the contents of existing folders instead of completely skipping them.
|
py
|
diff --git a/tests/tests.py b/tests/tests.py
index <HASH>..<HASH> 100644
--- a/tests/tests.py
+++ b/tests/tests.py
@@ -1,8 +1,13 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
+import sys
import unittest
-from unittest.mock import patch, mock_open
+
+if sys.version_info[0] < 3:
+ import mock
+else:
+ from unittest import mock
from bs4 import BeautifulSoup
@@ -135,8 +140,8 @@ class TestConflictedExtractor(unittest.TestCase):
)
-@patch('csv.writer')
-@patch('html_table_extractor.extractor.open')
[email protected]('csv.writer')
[email protected]('html_table_extractor.extractor.open')
class TestWriteToCsv(unittest.TestCase):
def setUp(self):
html = """
@@ -153,7 +158,7 @@ class TestWriteToCsv(unittest.TestCase):
"""
self.extractor = Extractor(html)
self.extractor.parse()
- mock_open()
+ mock.mock_open()
def test_write_to_csv_default(self, csv_mock, _):
self.extractor.write_to_csv()
|
python <I> for unittest's mock
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,7 @@ def read(fname):
setup(
name='xapian-haystack',
- version='1.1.3beta',
+ version='1.1.4alpha',
description="A Xapian backend for Haystack",
long_description=read('README.rst'),
classifiers=[
|
Updated version number in setup.py to match the version number in the xapian_backend.py file.
|
py
|
diff --git a/spyderlib/widgets/arrayeditor.py b/spyderlib/widgets/arrayeditor.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/arrayeditor.py
+++ b/spyderlib/widgets/arrayeditor.py
@@ -551,7 +551,7 @@ class ArrayEditor(QDialog):
self.setLayout(self.layout)
self.setWindowIcon(ima.icon('arredit'))
if title:
- title = to_text_string(title) # in case title is not a string
+ title = to_text_string(title) + " - " + _("NumPy array")
else:
title = _("Array editor")
if readonly:
|
Variable Explorer: Add type name to array editor title
|
py
|
diff --git a/scapy.py b/scapy.py
index <HASH>..<HASH> 100755
--- a/scapy.py
+++ b/scapy.py
@@ -21,6 +21,9 @@
#
# $Log: scapy.py,v $
+# Revision 1.0.4.10 2006/04/20 09:13:49 pbi
+# - fixed SetGen to better test int couples for intervals
+#
# Revision 1.0.4.9 2006/04/10 05:31:11 pbi
# - use None value to specify timeout must be calculated in __sr_loop()
#
@@ -1391,7 +1394,7 @@
from __future__ import generators
-RCSID="$Id: scapy.py,v 1.0.4.9 2006/04/10 05:31:11 pbi Exp $"
+RCSID="$Id: scapy.py,v 1.0.4.10 2006/04/20 09:13:49 pbi Exp $"
VERSION = RCSID.split()[2]+"beta"
@@ -2486,7 +2489,7 @@ class SetGen(Gen):
return element
def __iter__(self):
for i in self.set:
- if (type(i) is tuple) and (len(i) == 2):
+ if (type(i) is tuple) and (len(i) == 2) and type(i[0]) is int and type(i[1]) is int:
if (i[0] <= i[1]):
j=i[0]
while j <= i[1]:
|
- fixed SetGen to better test int couples for intervals
|
py
|
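The added type checks keep non-integer 2-tuples from being misread as intervals. A generic sketch of the generator idea, using isinstance rather than scapy's `type(i) is int` comparison:

def expand(values):
    # 2-tuples of ints denote inclusive ranges; everything else
    # (including tuples of non-ints) is yielded unchanged.
    for item in values:
        if (isinstance(item, tuple) and len(item) == 2
                and isinstance(item[0], int) and isinstance(item[1], int)
                and item[0] <= item[1]):
            for j in range(item[0], item[1] + 1):
                yield j
        else:
            yield item

assert list(expand([1, (3, 5), ('a', 'b')])) == [1, 3, 4, 5, ('a', 'b')]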
diff --git a/proselint/checks/garner/preferred_forms.py b/proselint/checks/garner/preferred_forms.py
index <HASH>..<HASH> 100644
--- a/proselint/checks/garner/preferred_forms.py
+++ b/proselint/checks/garner/preferred_forms.py
@@ -155,6 +155,8 @@ def check(text):
["Meanwhile,", ["Meantime,"]],
["modus operandi", ["mode of operandi"]],
["modi operandi", ["modes of operandi"]],
+ ["motion seconded", ["notion seconded"]],
+ ["seconded the motion", ["seconded the notion"]],
# Verbosity
["try to", ["make an attempt to"]],
|
Adds daily GMAU (Garner's Modern American Usage) entry on seconded motions
|
py
|
diff --git a/cbamf/viz/ilmplots.py b/cbamf/viz/ilmplots.py
index <HASH>..<HASH> 100644
--- a/cbamf/viz/ilmplots.py
+++ b/cbamf/viz/ilmplots.py
@@ -31,7 +31,7 @@ def smile_comparison_plot(state0, state1, stdfrac=0.7):
sl = np.s_[s.pad:-s.pad,s.pad:-s.pad,s.pad:-s.pad]
diff = -(s.image - s.get_model_image())[sl]
- m = good_particles(s, False, False)
+ m = good_particles(s, False)
r = s.obj.rad[m]
std = stdfrac*r.std()
@@ -42,7 +42,7 @@ def smile_comparison_plot(state0, state1, stdfrac=0.7):
for i,(s,o,color) in enumerate(zip(states, orders, colors)):
ax = ig[i]
- m = good_particles(s, True, True)
+ m = good_particles(s, False)
p = s.obj.pos[m]
r = s.obj.rad[m]
z,y,x = p.T
|
better defaults for ilm plot
|
py
|
diff --git a/openquake/calculators/tests/event_based_test.py b/openquake/calculators/tests/event_based_test.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/tests/event_based_test.py
+++ b/openquake/calculators/tests/event_based_test.py
@@ -245,11 +245,8 @@ class EventBasedTestCase(CalculatorTestCase):
def test_case_8(self):
out = self.run_calc(case_8.__file__, 'job.ini', exports='csv')
[fname] = out['ruptures', 'csv']
- maxyear = self.calc.datastore['events']['year'].max()
- itime = self.calc.datastore['oqparam'].investigation_time
- # the year in the events table is below the investigation time because
- # this is a case with nonparametric sources, the SES are independent
- self.assertGreater(itime, maxyear)
+ years = sorted(self.calc.datastore['events']['year'])
+ self.assertEqual(years, [15, 29, 39, 43])
if REFERENCE_OS:
self.assertEqualFiles('expected/rup_data.csv', fname)
|
Fixed test [skip CI]
|
py
|
diff --git a/pypet/pypetlogging.py b/pypet/pypetlogging.py
index <HASH>..<HASH> 100644
--- a/pypet/pypetlogging.py
+++ b/pypet/pypetlogging.py
@@ -109,7 +109,7 @@ LOGGING_DICT = {
def _change_logging_kwargs(kwargs):
""" Helper function to turn the simple logging kwargs into a `log_config`."""
- log_levels = kwargs.pop('log_levels', None)
+ log_levels = kwargs.pop('log_level', None)
log_folder = kwargs.pop('log_folder', 'logs')
logger_names = kwargs.pop('logger_names', '')
if log_levels is None:
|
FIX: Wrong popping of arguments;
|
py
|
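The bug was silent because dict.pop with a default never raises: popping the wrong key name just returns the fallback while the caller's actual kwarg is ignored. A hypothetical mini-version of the helper showing the pattern:

def change_logging_kwargs(kwargs):
    # The popped name must match the kwarg callers actually pass
    # ('log_level'); popping 'log_levels' would silently yield None.
    log_level = kwargs.pop('log_level', None)
    log_folder = kwargs.pop('log_folder', 'logs')
    if log_level is None:
        log_level = 'INFO'
    return {'level': log_level, 'folder': log_folder}

assert change_logging_kwargs({'log_level': 'DEBUG'})['level'] == 'DEBUG'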
diff --git a/pypiper/ngstk.py b/pypiper/ngstk.py
index <HASH>..<HASH> 100644
--- a/pypiper/ngstk.py
+++ b/pypiper/ngstk.py
@@ -185,7 +185,12 @@ def count_mapped_reads(file, paired_end=True):
return -1
+
def sam_conversions(sam, depth=True):
+ '''
+ Convert sam files to bam files, then sort and index them for later use.
+ :param depth: also calculate coverage over each position
+ '''
cmd = "samtools view -bS " + sam + " > " + sam.replace(".sam", ".bam") + "\n"
cmd += "samtools sort " + sam.replace(".sam", ".bam") + " " + sam.replace(".sam", "_sorted") + "\n"
cmd += "samtools index " + sam.replace(".sam", "_sorted.bam") + "\n"
@@ -196,6 +201,10 @@ def sam_conversions(sam, depth=True):
def bam_conversions(bam, depth=True):
+ '''
+ Sort and index bam files for later use.
+ :param depth: also calculate coverage over each position
+ '''
cmd = "samtools view -h " + bam + " > " + bam.replace(".bam", ".sam") + "\n"
cmd += "samtools sort " + bam + " " + bam.replace(".bam", "_sorted") + "\n"
cmd += "samtools index " + bam.replace(".bam", "_sorted.bam") + "\n"
|
add docstrings to some ngstk functions
|
py
|
diff --git a/tests/test_easyid3.py b/tests/test_easyid3.py
index <HASH>..<HASH> 100644
--- a/tests/test_easyid3.py
+++ b/tests/test_easyid3.py
@@ -37,8 +37,11 @@ class TEasyID3(TestCase):
self.id3["artist"] = "baz"
self.id3.pprint()
- def test_has_key(self):
- if not PY3:
+ def test_in(self):
+ self.failIf("foo" in self.id3)
+
+ if not PY3:
+ def test_has_key(self):
self.failIf(self.id3.has_key("foo"))
def test_empty_file(self):
|
test_easyid3.py: added test for in operator.
|
py
|
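Context for the split: the `in` operator is backed by __contains__ and works on every Python, while has_key is a Python-2-only dict idiom — hence the new test_in plus the version-gated legacy test. A tiny illustrative mapping supporting both spellings:

class TagDict(object):
    def __init__(self):
        self._data = {}
    def __contains__(self, key):     # drives the 'in' operator
        return key in self._data
    def has_key(self, key):          # legacy Python 2 spelling
        return key in self._data

d = TagDict()
assert 'foo' not in d
assert not d.has_key('foo')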
diff --git a/tests/integration/container_test.py b/tests/integration/container_test.py
index <HASH>..<HASH> 100644
--- a/tests/integration/container_test.py
+++ b/tests/integration/container_test.py
@@ -342,7 +342,6 @@ class CreateContainerTest(api_test.BaseTestCase):
BUSYBOX, 'true',
host_config=self.client.create_host_config(
memswap_limit='1G',
- mem_swappiness='40',
mem_limit='700M'
)
)
@@ -353,7 +352,7 @@ class CreateContainerTest(api_test.BaseTestCase):
self.assertIn('HostConfig', inspect)
host_config = inspect['HostConfig']
- for limit in ['Memory', 'MemorySwappiness', 'MemorySwap']:
+ for limit in ['Memory', 'MemorySwap']:
self.assertIn(limit, host_config)
def test_create_with_memory_constraints_with_int(self):
|
Don't include mem_swappiness when testing use of str values
|
py
|
diff --git a/test/test_GoogleGeocoder.py b/test/test_GoogleGeocoder.py
index <HASH>..<HASH> 100644
--- a/test/test_GoogleGeocoder.py
+++ b/test/test_GoogleGeocoder.py
@@ -1,5 +1,8 @@
+import pytest
+
from astral import GoogleGeocoder
[email protected]
def test_GoogleLocator():
locator = GoogleGeocoder()
l = locator['Eiffel Tower']
|
Marked test so we can skip it.
|
py
|
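For reference, the marker's usual spelling — the reason string below is an assumption (the test presumably talks to the live Google geocoding service):

import pytest

@pytest.mark.skip(reason='talks to the live Google geocoding service')
def test_GoogleLocator():
    ...

pytest.mark.skipif(condition, reason=...) is the conditional variant when the test should still run in some environments.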
diff --git a/taskw/warrior.py b/taskw/warrior.py
index <HASH>..<HASH> 100644
--- a/taskw/warrior.py
+++ b/taskw/warrior.py
@@ -97,7 +97,7 @@ class TaskWarriorBase(with_metaclass(abc.ABCMeta, object)):
else:
annotations.append(v)
- for key in task.keys():
+ for key in list(task.keys()):
if key.startswith('annotation_'):
annotations.append(task[key])
del(task[key])
|
Fix py3 iterator behavior. This was already committed in another branch, but we have so many flying around now I can't find it. Credit due originally to Adam Coddington, I believe.
|
py
|
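On Python 3, dict.keys() returns a live view, and deleting entries mid-iteration raises RuntimeError ('dictionary changed size during iteration'); wrapping it in list() snapshots the keys first. A condensed, runnable version of the loop being fixed:

task = {'annotation_1': 'a', 'annotation_2': 'b', 'priority': 'H'}
annotations = []
for key in list(task.keys()):   # snapshot: safe to mutate task below
    if key.startswith('annotation_'):
        annotations.append(task.pop(key))   # pop returns and deletes in one step
assert annotations == ['a', 'b']
assert task == {'priority': 'H'}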
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,8 @@ else:
exec(open('docker/version.py').read())
-test_requirements = []
+with open('./test-requirements.txt') as test_reqs_txt:
+ test_requirements = [line for line in test_reqs_txt]
with open(requirements_file) as requirements_txt:
requirements = [line for line in requirements_txt]
|
Correct test requirements in setup.py
|
py
|
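A minimal sketch of the pattern; the strip/comment filtering is an addition not present in the diff (which keeps raw lines, trailing newlines included):

def read_requirements(path):
    with open(path) as handle:
        return [line.strip() for line in handle
                if line.strip() and not line.startswith('#')]

# e.g. setup(..., install_requires=read_requirements('requirements.txt'),
#            tests_require=read_requirements('test-requirements.txt'))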
diff --git a/wakatime/packages/pygments/lexers/javascript.py b/wakatime/packages/pygments/lexers/javascript.py
index <HASH>..<HASH> 100644
--- a/wakatime/packages/pygments/lexers/javascript.py
+++ b/wakatime/packages/pygments/lexers/javascript.py
@@ -37,7 +37,7 @@ class JavascriptLexer(RegexLexer):
name = 'JavaScript'
aliases = ['js', 'javascript']
- filenames = ['*.js', '*.jsm']
+ filenames = ['*.js', '*.jsm', '*.mjs']
mimetypes = ['application/javascript', 'application/x-javascript',
'text/x-javascript', 'text/javascript']
@@ -1035,7 +1035,6 @@ class CoffeeScriptLexer(RegexLexer):
filenames = ['*.coffee']
mimetypes = ['text/coffeescript']
-
_operator_re = (
r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
r'\|\||\\(?=\n)|'
@@ -1464,6 +1463,7 @@ class EarlGreyLexer(RegexLexer):
],
}
+
class JuttleLexer(RegexLexer):
"""
For `Juttle`_ source code.
|
support *.mjs as JavaScript
|
py
|
diff --git a/awesome/context.py b/awesome/context.py
index <HASH>..<HASH> 100644
--- a/awesome/context.py
+++ b/awesome/context.py
@@ -23,6 +23,14 @@ def consuming(iterator):
@contextmanager
+def calling(callable, *args, **kwargs):
+ try:
+ yield
+ finally:
+ callable(*args, **kwargs)
+
+
+@contextmanager
def change_directory(path):
original_path = os.getcwdu()
|
Added a `calling` context manager. The context manager makes sure the function is called with the desired arguments when the context is exited.
|
py
|
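A usage sketch of the new manager; the parameter is renamed callable_ here only to avoid shadowing the builtin:

from contextlib import contextmanager

@contextmanager
def calling(callable_, *args, **kwargs):
    try:
        yield
    finally:
        callable_(*args, **kwargs)   # runs even if the block raised

with calling(print, 'cleanup ran'):
    print('doing work')
# prints 'doing work' then 'cleanup ran' -- on success or on exception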
diff --git a/MAVProxy/mavproxy.py b/MAVProxy/mavproxy.py
index <HASH>..<HASH> 100755
--- a/MAVProxy/mavproxy.py
+++ b/MAVProxy/mavproxy.py
@@ -669,7 +669,7 @@ def main_loop():
rin = []
for master in mpstate.mav_master:
- if master.fd is not None:
+ if master.fd is not None and not master.portdead:
rin.append(master.fd)
for m in mpstate.mav_outputs:
rin.append(m.fd)
|
don't select ports that are dead; this fixes a delay when the main serial port is dead
|
py
|
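A sketch of the loop's shape — the connection objects and portdead flag are stand-ins modelled on the diff, not MAVProxy's full state:

import select

def readable(connections, timeout=0.1):
    # Build the read set only from live descriptors; per the commit,
    # including a dead serial port's fd delayed the main loop.
    rin = [c.fd for c in connections
           if c.fd is not None and not c.portdead]
    if not rin:
        return []
    rfds, _, _ = select.select(rin, [], [], timeout)
    return rfds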