diff (stringlengths 139–3.65k) | message (stringlengths 8–627) | diff_languages (stringclasses: 1 value)
---|---|---|
diff --git a/chat_example.py b/chat_example.py
index <HASH>..<HASH> 100644
--- a/chat_example.py
+++ b/chat_example.py
@@ -3,22 +3,30 @@ from terrabot.events import Events
import threading
-bot = TerraBot('127.0.0.1', protocol=155)
-eventm = bot.event_manager
+bot = TerraBot('127.0.0.1')
+event = bot.get_event_manager()
-@eventm.on_event(Events.Chat)
+@event.on_event(Events.Chat)
def chat(event_id, msg):
msg = str(msg, "utf-8")
print(msg)
if "stop" in msg:
bot.stop()
-@eventm.on_event(Events.Blocked)
+@event.on_event(Events.Blocked)
def cant_connect(event_id, msg):
print(msg)
bot.stop()
-@eventm.on_event(Events.Login)
+@event.on_event(Events.TileUpdate)
+def tile_update(event_id, tiles):
+ print("Tile update")
+
+@event.on_event(Events.Initialized)
+def initialized(event_id, data):
+ print("Initialized")
+
+@event.on_event(Events.Login)
def logged_in(event_id, data):
print("Logged in")
|
Improved the chat example a bit
|
py
|
diff --git a/metaknowledge/bin/metaknowledgeCLI.py b/metaknowledge/bin/metaknowledgeCLI.py
index <HASH>..<HASH> 100644
--- a/metaknowledge/bin/metaknowledgeCLI.py
+++ b/metaknowledge/bin/metaknowledgeCLI.py
@@ -259,7 +259,7 @@ def getThresholds(clargs, grph):
('5', "Remove nodes below some degree"),
('6', "Remove nodes above some degree"),
])
- print("The network contains {0} nodes and {1} edges, of which {2} are isolated and {3} are self loops.".format(len(list(grph.nodes())), len(list(grph.edges())), len(list(nx.isolates(grph))), len(list(grph.selfloop_edges()))))
+ print("The network contains {0} nodes and {1} edges, of which {2} are isolated and {3} are self loops.".format(len(list(grph.nodes())), len(list(grph.edges())), len(list(nx.isolates(grph))), len(list(nx.selfloop_edges(grph)))))
thresID = int(inputMenu(thresDict, header = "What type of filtering to you want? "))
if thresID == 0:
return grph
|
replaced deprecated networkx function
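For context, a minimal sketch of the API move (assuming networkx 2.x; the graph method was removed in networkx 2.4 in favour of the module-level function):

```python
import networkx as nx

G = nx.Graph([(1, 1), (1, 2)])      # one self loop plus a normal edge
# old, removed method form: G.selfloop_edges()
print(list(nx.selfloop_edges(G)))   # [(1, 1)]
print(nx.number_of_selfloops(G))    # 1
```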
|
py
|
diff --git a/owncloud/owncloud.py b/owncloud/owncloud.py
index <HASH>..<HASH> 100644
--- a/owncloud/owncloud.py
+++ b/owncloud/owncloud.py
@@ -184,7 +184,7 @@ class Client():
self.__session.verify = self.__verify_certs
self.__session.auth = (user_id, password)
# TODO: use another path to prevent that the server renders the file list page
- res = self.__session.get(self.url)
+ res = self.__session.get(self.url + 'index.php')
if res.status_code == 200:
if self.__single_session:
# Keep the same session, no need to re-auth every call
|
Added index.php to the request url
|
py
|
diff --git a/git/remote.py b/git/remote.py
index <HASH>..<HASH> 100644
--- a/git/remote.py
+++ b/git/remote.py
@@ -122,14 +122,14 @@ class PushInfoList(IterableList):
def __init__(self) -> None:
super().__init__('push_infos')
- self.exception = None
+ self.error = None
- def raise_on_error(self):
+ def raise_if_error(self):
"""
Raise an exception if any ref failed to push.
"""
- if self.exception:
- raise self.exception
+ if self.error:
+ raise self.error
class PushInfo(IterableObj, object):
@@ -819,7 +819,7 @@ class Remote(LazyMixin, IterableObj):
raise
elif stderr_text:
log.warning("Error lines received while fetching: %s", stderr_text)
- output.exception = e
+ output.error = e
return output
|
Rename exception to error, raise_on_error to raise_if_error
|
py
|
diff --git a/spikeylab/gui/stim/component_detail.py b/spikeylab/gui/stim/component_detail.py
index <HASH>..<HASH> 100644
--- a/spikeylab/gui/stim/component_detail.py
+++ b/spikeylab/gui/stim/component_detail.py
@@ -126,7 +126,7 @@ class ComponentAttributerChecker(QtGui.QFrame):
for i in range(layout.count()):
w = layout.itemAt(i).widget()
if w.isChecked():
- attrs.append(w.text())
+ attrs.append(str(w.text()))
return attrs
def clearLayout(layout):
|
cast to Python string for compatibility
|
py
|
diff --git a/pylimit/pyratelimit.py b/pylimit/pyratelimit.py
index <HASH>..<HASH> 100644
--- a/pylimit/pyratelimit.py
+++ b/pylimit/pyratelimit.py
@@ -75,9 +75,9 @@ class PyRateLimit(object):
connection.zadd(namespace, current_time, current_time)
else:
current_count = 1 # initialize at 1 to compensate the case that this attempt is not getting counted
- connection.zrange(namespace, 0, -1)
+ connection.zcard(namespace)
redis_result = connection.execute()
- current_count += len(redis_result[-1])
+ current_count += redis_result[-1]
if current_count <= self.limit:
can_attempt = True
return can_attempt
|
Let Redis count the elements instead of receiving all results assigned to the requested key
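A minimal sketch of the idea (assumes a reachable Redis server and redis-py's current `zadd` mapping signature; the key name is made up):

```python
import time
import redis

pipe = redis.Redis().pipeline()
now = time.time()
pipe.zadd("ratelimit:demo", {str(now): now})  # record this attempt
pipe.zcard("ratelimit:demo")                  # ZCARD counts members server-side
results = pipe.execute()
current_count = 1 + results[-1]               # an int; no full ZRANGE payload to len()
```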
|
py
|
diff --git a/src/jobTreeSlave.py b/src/jobTreeSlave.py
index <HASH>..<HASH> 100644
--- a/src/jobTreeSlave.py
+++ b/src/jobTreeSlave.py
@@ -64,6 +64,13 @@ def loadPickleFile(pickleFile):
fileHandle.close()
return i
+def nextOpenDescriptor():
+ """Gets the number of the next available file descriptor.
+ """
+ descriptor = os.open("/dev/null", os.O_RDONLY)
+ os.close(descriptor)
+ return descriptor
+
def main():
sys.path.append(sys.argv[1])
sys.argv.remove(sys.argv[1])
@@ -165,7 +172,9 @@ def main():
print "---JOBTREE SLAVE OUTPUT LOG---"
sys.stdout.flush()
-
+ #Log the number of open file descriptors so we can tell if we're leaking
+ #them.
+ logger.debug("Next available descriptor: {}".format(nextOpenDescriptor()))
##########################################
#Parse input files
|
Added logging message to debug leaking file descriptors.
|
py
|
diff --git a/testing/code/test_source.py b/testing/code/test_source.py
index <HASH>..<HASH> 100644
--- a/testing/code/test_source.py
+++ b/testing/code/test_source.py
@@ -374,10 +374,7 @@ def test_getfslineno():
fspath, lineno = getfslineno(f)
- fname = __file__
- fname = fname[:fname.find('.py')] + '.py'
-
- assert fspath == py.path.local(fname)
+ assert fspath.basename == "test_source.py"
assert lineno == py.code.getrawcode(f).co_firstlineno-1 # see findsource
class A(object):
@@ -386,5 +383,5 @@ def test_getfslineno():
fspath, lineno = getfslineno(A)
_, A_lineno = py.std.inspect.findsource(A)
- assert fspath == py.path.local(fname)
+ assert fspath.basename == "test_source.py"
assert lineno == A_lineno
|
fix test to work on Jython and CPython --HG-- branch : trunk
|
py
|
diff --git a/sphinx_autobuild/__init__.py b/sphinx_autobuild/__init__.py
index <HASH>..<HASH> 100755
--- a/sphinx_autobuild/__init__.py
+++ b/sphinx_autobuild/__init__.py
@@ -28,6 +28,12 @@ __version__ = '0.5.0'
__url__ = 'https://github.com/GaretJax/sphinx-autobuild'
+DEFAULT_IGNORE_REGEX = [
+ r'__pycache__/.*\.py',
+ r'.*\.pyc',
+]
+
+
class _WatchdogHandler(FileSystemEventHandler):
def __init__(self, watcher, action):
@@ -266,7 +272,10 @@ def main():
if not os.path.exists(outdir):
os.makedirs(outdir)
- builder = SphinxBuilder(outdir, build_args, ignored, args.re_ignore)
+ re_ignore = args.re_ignore
+ re_ignore.extend(DEFAULT_IGNORE_REGEX)
+
+ builder = SphinxBuilder(outdir, build_args, ignored, re_ignore)
server = Server(watcher=LivereloadWatchdogWatcher())
server.watch(srcdir, builder)
|
Add some regexes to ignore by default
|
py
|
diff --git a/ucoinpy/api/bma/__init__.py b/ucoinpy/api/bma/__init__.py
index <HASH>..<HASH> 100644
--- a/ucoinpy/api/bma/__init__.py
+++ b/ucoinpy/api/bma/__init__.py
@@ -127,10 +127,6 @@ class API(object):
response = yield from asyncio.wait_for(
aiohttp.post(self.reverse_url(path), data=kwargs, headers=self.headers),
timeout=15)
-
- if response.status != 200:
- raise ValueError('status code != 200 => %d (%s)' % (response.status, (yield from (response.text()))))
-
return response
from . import network, blockchain, tx, wot, node, ud
|
Do not throw exception when POSTing
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,10 +6,13 @@
# General Public License version 3 (LGPLv3) as published by the Free
# Software Foundation. See the file README for copying conditions.
-from ez_setup import use_setuptools
-use_setuptools()
+try:
+ from setuptools import setup, find_packages
+except ImportError:
+ from ez_setup import use_setuptools
+ use_setuptools()
+ from setuptools import setup, find_packages
-from setuptools import setup, find_packages
from smuggler import get_version
setup(
|
Fixing a bug on setup.py.
|
py
|
diff --git a/gkeepapi/__init__.py b/gkeepapi/__init__.py
index <HASH>..<HASH> 100644
--- a/gkeepapi/__init__.py
+++ b/gkeepapi/__init__.py
@@ -3,7 +3,7 @@
.. moduleauthor:: Kai <[email protected]>
"""
-__version__ = '0.11.13'
+__version__ = '0.11.14'
import logging
import re
@@ -181,7 +181,7 @@ class API(object):
self._session = requests.Session()
self._auth = auth
self._base_url = base_url
- self._session.headers.update({'User-Agent': 'gkeepapi/' + __version__})
+ self._session.headers.update({'User-Agent': 'x-gkeepapi/%s (https://github.com/kiwiz/gkeepapi)' % __version__})
def getAuth(self):
"""Get authentication details for this API.
|
Issue #<I>: Work around blacklisting issue
|
py
|
diff --git a/bin/dat2tdv.py b/bin/dat2tdv.py
index <HASH>..<HASH> 100644
--- a/bin/dat2tdv.py
+++ b/bin/dat2tdv.py
@@ -9,6 +9,9 @@ if __name__ == "__main__":
try:
with open(sys.argv[1], "rb") as _in_file:
_log = cPickle.load(_in_file)
- _log.write_to_file(format=CAN.LOG_FORMAT_TDV, name=("%s.log" % os.path.splitext(_options.input_file)[0]), path=os.path.dirname(_options.input_file))
+ _path = os.path.dirname(sys.argv[1])
+ if len(_path) == 0:
+ _path = "./"
+ _log.write_to_file(format=CAN.LOG_FORMAT_TDV, name=("%s.log" % os.path.splitext(sys.argv[1])[0]), path=_path)
except IOError:
sys.stderr.write("ERROR: Input file %s not found\n" % sys.argv[1])
|
Removed references to non-existent "options" variable. Fixed a crash when converting a log file in the current directory.
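The crash came from `os.path.dirname` returning an empty string for a bare filename; a small sketch of the guard (file names hypothetical):

```python
import os

os.path.dirname("logs/run1.dat")   # 'logs'
os.path.dirname("run1.dat")        # ''  (file in the current directory)

path = os.path.dirname("run1.dat") or "./"   # fall back to './' as the fix does
```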
|
py
|
diff --git a/blockstack/blockstackd.py b/blockstack/blockstackd.py
index <HASH>..<HASH> 100644
--- a/blockstack/blockstackd.py
+++ b/blockstack/blockstackd.py
@@ -2037,6 +2037,17 @@ def load_expected_snapshots( snapshots_path ):
snapshots_path = os.path.expanduser(snapshots_path)
expected_snapshots = {}
try:
+ # sqlite3 db?
+ db_con = virtualchain.StateEngine.db_connect(snapshots_path)
+ expected_snapshots = virtualchain.StateEngine.get_consensus_hashes(None, None, db_con=db_con, completeness_check=False)
+ log.debug("Loaded expected snapshots from chainstate DB {}, {} entries".format(snapshots_path, len(expected_snapshots)))
+ return expected_snapshots
+
+ except:
+ log.debug("{} does not appear to be a chainstate DB".format(snapshots_path))
+
+ # legacy chainstate?
+ try:
with open(snapshots_path, "r") as f:
snapshots_json = f.read()
|
extract snapshots from chainstate db as well as legacy snapshots JSON
|
py
|
diff --git a/pynlpl/tests/folia.py b/pynlpl/tests/folia.py
index <HASH>..<HASH> 100755
--- a/pynlpl/tests/folia.py
+++ b/pynlpl/tests/folia.py
@@ -1973,6 +1973,7 @@ class Test4Edit(unittest.TestCase):
"""Edit Check - Altering word text"""
#Important note: directly altering text is usually bad practise, you'll want to use proper corrections instead.
+ #this may also lead to inconsistencies if there is redundant text on higher levels
w = self.doc['WR-P-E-J-0000000001.p.1.s.8.w.9']
self.assertEqual(w.text(), 'terweil')
@@ -1983,6 +1984,7 @@ class Test4Edit(unittest.TestCase):
"""Edit Check - Altering word text with reserved symbols"""
#Important note: directly altering text is usually bad practise, you'll want to use proper corrections instead.
+ #This test just serves to test reserved symbols
w = self.doc['WR-P-E-J-0000000001.p.1.s.8.w.9']
w.settext('1 & 1 > 0')
|
added some notes for test<I>a and test<I>b
|
py
|
diff --git a/vexbot/adapters/messaging.py b/vexbot/adapters/messaging.py
index <HASH>..<HASH> 100644
--- a/vexbot/adapters/messaging.py
+++ b/vexbot/adapters/messaging.py
@@ -22,16 +22,15 @@ class _HeartbeatReciever:
def __init__(self, messaging, loop):
self.messaging = messaging
self._heart_beat_check = PeriodicCallback(self._get_state, 1000, loop)
- self._last_bot_uuid = None
self.last_message = None
- self.last_message_time = None
- self._last_message = time.time()
+ self._last_bot_uuid = None
+ self._last_message_time = time.time()
def start(self):
self._heart_beat_check.start()
def message_recieved(self, message):
- self._last_message = time.time()
+ self._last_message_time = time.time()
self.last_message = message
def _get_state(self):
|
small fixes to heartbeat time recording
|
py
|
diff --git a/openid/server/server.py b/openid/server/server.py
index <HASH>..<HASH> 100644
--- a/openid/server/server.py
+++ b/openid/server/server.py
@@ -382,10 +382,6 @@ class CheckIDRequest(OpenIDRequest):
'identity',
'return_to',
]
- optional = [
- 'trust_root',
- 'assoc_handle',
- ]
for field in required:
value = query.get(OPENID_PREFIX + field)
@@ -396,10 +392,11 @@ class CheckIDRequest(OpenIDRequest):
% (field, query))
setattr(self, field, value)
- for field in optional:
- value = query.get(OPENID_PREFIX + field)
- if value:
- setattr(self, field, value)
+ # There's a case for making self.trust_root be a TrustRoot
+ # here. But if TrustRoot isn't currently part of the "public" API,
+ # I'm not sure it's worth doing.
+ self.trust_root = query.get(OPENID_PREFIX + 'trust_root')
+ self.assoc_handle = query.get(OPENID_PREFIX + 'assoc_handle')
if not TrustRoot.parse(self.return_to):
raise MalformedReturnURL(query, self.return_to)
|
[project @ server.server.CheckIDRequest.fromQuery: simplify a bit.]
|
py
|
diff --git a/striplog/striplog.py b/striplog/striplog.py
index <HASH>..<HASH> 100644
--- a/striplog/striplog.py
+++ b/striplog/striplog.py
@@ -550,7 +550,8 @@ class Striplog(object):
null=None,
ignore=None,
source=None,
- stop=None):
+ stop=None,
+ fieldnames=None):
"""
Load from a CSV file or text.
"""
@@ -565,6 +566,13 @@ class Striplog(object):
source = source or 'CSV'
+ # Deal with multiple spaces in space delimited file.
+ if dlm == ' ':
+ text = re.sub(r'[ \t]+', ' ', text)
+
+ if fieldnames is not None:
+ text = dlm.join(fieldnames) + '\n' + text
+
try:
f = StringIO(text) # Python 3
except TypeError:
|
more flexible load from csv
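A sketch of what the two new branches buy you (hypothetical two-column data):

```python
import re

text = "10.2   Sandstone\n12.4\t Shale"   # runs of spaces/tabs between fields
dlm = ' '
text = re.sub(r'[ \t]+', ' ', text)       # collapse whitespace to a single delimiter
fieldnames = ['top', 'lithology']         # caller-supplied header for headerless files
text = dlm.join(fieldnames) + '\n' + text
print(text)
# top lithology
# 10.2 Sandstone
# 12.4 Shale
```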
|
py
|
diff --git a/inplaceeditform/fields.py b/inplaceeditform/fields.py
index <HASH>..<HASH> 100644
--- a/inplaceeditform/fields.py
+++ b/inplaceeditform/fields.py
@@ -90,7 +90,8 @@ class BaseAdaptorField(object):
return self.empty_value()
def empty_value(self):
- return getattr(settings, 'INPLACEEDIT_EDIT_EMPTY_VALUE', ugettext('Dobleclick to edit'))
+ return ugettext(getattr(settings, 'INPLACEEDIT_EDIT_EMPTY_VALUE',
+ 'Dobleclick to edit'))
def render_field(self, template_name="inplaceeditform/render_field.html", extra_context=None):
extra_context = extra_context or {}
@@ -172,8 +173,8 @@ class BaseAdaptorField(object):
self.field_name = transmeta.get_real_fieldname(self.field_name)
self.transmeta = True
if not self.render_value(self.field_name):
- message_translation = getattr(settings, 'INPLACEEDIT_EDIT_MESSAGE_TRANSLATION',
- ugettext('Write a traslation'))
+ message_translation = ugettext(getattr(settings, 'INPLACEEDIT_EDIT_MESSAGE_TRANSLATION',
+ 'Write a traslation'))
self.initial = {self.field_name: message_translation}
return
self.transmeta = False
|
Fixes #<I>. Now the texts are translatable.
|
py
|
diff --git a/tests/integration/states/archive.py b/tests/integration/states/archive.py
index <HASH>..<HASH> 100644
--- a/tests/integration/states/archive.py
+++ b/tests/integration/states/archive.py
@@ -119,6 +119,24 @@ class ArchiveTest(integration.ModuleCase,
self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
+ @skipIf(os.geteuid() != 0, 'you must be root to run this test')
+ def test_archive_extracted_with_root_user_and_group(self):
+ '''
+ test archive.extracted without skip_verify
+ only external resources work to check to
+ ensure source_hash is verified correctly
+ '''
+ ret = self.run_state('archive.extracted', name=ARCHIVE_DIR,
+ source=ARCHIVE_TAR_SOURCE, archive_format='tar',
+ source_hash=ARCHIVE_TAR_HASH,
+ user='root', group='root')
+ if 'Timeout' in ret:
+ self.skipTest('Timeout talking to local tornado server.')
+
+ self.assertSaltTrueReturn(ret)
+
+ self._check_ext_remove(ARCHIVE_DIR, UNTAR_FILE)
+
if __name__ == '__main__':
from integration import run_tests
|
Add integration test for archive.extracted with user/group set to root
|
py
|
diff --git a/docs/conf.py b/docs/conf.py
index <HASH>..<HASH> 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -34,7 +34,6 @@
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosectionlabel',
- 'sphinxcontrib.asyncio',
]
# Add any paths that contain templates here, relative to this directory.
|
Remove sphinxcontrib.asyncio from docs build - Broken for newer sphinx versions
|
py
|
diff --git a/src/python/pants/backend/core/tasks/confluence_publish.py b/src/python/pants/backend/core/tasks/confluence_publish.py
index <HASH>..<HASH> 100644
--- a/src/python/pants/backend/core/tasks/confluence_publish.py
+++ b/src/python/pants/backend/core/tasks/confluence_publish.py
@@ -41,7 +41,7 @@ class ConfluencePublish(Task):
legacy='confluence_publish_open')
register('--user',
help='Confluence user name, defaults to unix user.',
- legacy='confluence_user"')
+ legacy='confluence_user')
def __init__(self, *args, **kwargs):
super(ConfluencePublish, self).__init__(*args, **kwargs)
|
get the name right, no stray quote mark, sheesh. Testing Done: monkey-patched in Twitter's pants, hand-copied over. What could go wrong? Bugs closed: <I> Reviewed at <URL>
|
py
|
diff --git a/trezorlib/pinmatrix.py b/trezorlib/pinmatrix.py
index <HASH>..<HASH> 100644
--- a/trezorlib/pinmatrix.py
+++ b/trezorlib/pinmatrix.py
@@ -7,7 +7,7 @@ from PyQt4.QtCore import QObject, SIGNAL, QRegExp, Qt
class PinButton(QPushButton):
def __init__(self, password, encoded_value):
- super(PinButton, self).__init__()
+ super(PinButton, self).__init__('?')
self.password = password
self.encoded_value = encoded_value
|
Added '?' to pinmatrix
|
py
|
diff --git a/ipyrad/assemble/clustmap_across.py b/ipyrad/assemble/clustmap_across.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/clustmap_across.py
+++ b/ipyrad/assemble/clustmap_across.py
@@ -136,7 +136,7 @@ class Step6:
if self.data.populations:
self.cgroups = {}
for idx, val in enumerate(self.data.populations.values()):
- self.cgroups[idx] = val[1]
+ self.cgroups[idx] = [self.data.samples[x] for x in val[1]]
# by default let's split taxa into groups of 20-50 samples at a time
else:
@@ -272,6 +272,8 @@ class Step6:
rasyncs = {}
for jobid, group in self.cgroups.items():
# should we use sample objects or sample names in cgroups?
+ # Well you gotta choose one! W/o pops file it uses sample objects
+ # so I made it use sample objects if pop_assign_file is set iao
samples = [i for i in self.samples if i in group]
args = (self.data, jobid, samples, self.randomseed)
rasyncs[jobid] = self.lbview.apply(build_concat_files, *args)
|
Fix step 6 with pop_assign_file
|
py
|
diff --git a/matrix_client/client.py b/matrix_client/client.py
index <HASH>..<HASH> 100644
--- a/matrix_client/client.py
+++ b/matrix_client/client.py
@@ -251,7 +251,6 @@ class MatrixClient(object):
def add_leave_listener(self, callback):
""" Add a listener that will send a callback when the client has left a room.
- an invite request.
Args:
callback (func(room_id, room)): Callback called when the client
|
Remove copy/paste leftover in add_leave_listener(). Removed "\n an invite request." from the "MatrixClient.add_leave_listener()" docstring; it looks like a leftover from copying/pasting the MatrixClient.add_invite_listener() docstring.
|
py
|
diff --git a/taxi/timesheet/__init__.py b/taxi/timesheet/__init__.py
index <HASH>..<HASH> 100644
--- a/taxi/timesheet/__init__.py
+++ b/taxi/timesheet/__init__.py
@@ -112,7 +112,7 @@ class Timesheet(object):
def continue_entry(self, date, end_time, description=None):
try:
entry = self.entries[date][-1]
- except KeyError:
+ except IndexError:
raise NoActivityInProgressError()
if (not isinstance(entry.duration, tuple)
|
Fix stop command when no activity is in progress
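The one-line fix hinges on which exception an empty day raises; a minimal illustration:

```python
entries = {"2016-07-01": []}    # the date key exists but holds no entries yet

try:
    entries["2016-07-01"][-1]
except KeyError:
    print("never reached: the key is present")
except IndexError:
    print("empty list -> IndexError, the case the handler now catches")
```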
|
py
|
diff --git a/tests/test_annex.py b/tests/test_annex.py
index <HASH>..<HASH> 100644
--- a/tests/test_annex.py
+++ b/tests/test_annex.py
@@ -1,8 +1,3 @@
-import json
-
-import falcon
-import pytest
-
from .dataset_fixtures import *
from datalad_service.common.annex import create_file_obj
|
Cleanup extra imports in create_file_obj test.
|
py
|
diff --git a/vyked/jsonprotocol.py b/vyked/jsonprotocol.py
index <HASH>..<HASH> 100644
--- a/vyked/jsonprotocol.py
+++ b/vyked/jsonprotocol.py
@@ -16,6 +16,7 @@ class JSONProtocol(asyncio.Protocol):
self._connected = False
self._transport = None
self._obj_streamer = None
+ self._pending_data = []
@staticmethod
def _make_frame(packet):
|
Fix bug where pending data list was null
|
py
|
diff --git a/src/ocrmypdf/pdfa.py b/src/ocrmypdf/pdfa.py
index <HASH>..<HASH> 100644
--- a/src/ocrmypdf/pdfa.py
+++ b/src/ocrmypdf/pdfa.py
@@ -131,6 +131,7 @@ def _encode_ascii(s: str) -> str:
'(': '',
')': '',
'\\': '',
+ '\0': ''
})
return s.translate(trans).encode('ascii', errors='replace').decode()
@@ -284,7 +285,7 @@ def generate_pdfa_ps(target_filename, pdfmark, icc='sRGB', ascii_docinfo=False):
hex_icc_profile = hexlify(bytes_icc_profile)
icc_profile = '<' + hex_icc_profile.decode('ascii') + '>'
- ps = _get_pdfa_def(icc_profile, icc, pdfmark)
+ ps = _get_pdfa_def(icc_profile, icc, pdfmark, ascii_docinfo=ascii_docinfo)
# We should have encoded everything to pure ASCII by this point, and
# to be safe, only allow ASCII in PostScript
|
Ghostscript: fix issues in strict ASCII implementation
|
py
|
diff --git a/mtcnn/__init__.py b/mtcnn/__init__.py
index <HASH>..<HASH> 100644
--- a/mtcnn/__init__.py
+++ b/mtcnn/__init__.py
@@ -24,4 +24,4 @@
#SOFTWARE.
__author__ = "Iván de Paz Centeno"
-__version__= "0.0.7"
+__version__= "0.0.8"
|
Changed to version <I>
|
py
|
diff --git a/identify/identify.py b/identify/identify.py
index <HASH>..<HASH> 100644
--- a/identify/identify.py
+++ b/identify/identify.py
@@ -24,6 +24,7 @@ ALL_TAGS = {DIRECTORY, SYMLINK, FILE, EXECUTABLE, NON_EXECUTABLE, TEXT, BINARY}
ALL_TAGS.update(*extensions.EXTENSIONS.values())
ALL_TAGS.update(*extensions.NAMES.values())
ALL_TAGS.update(*interpreters.INTERPRETERS.values())
+ALL_TAGS = frozenset(ALL_TAGS)
def tags_from_path(path):
|
Make ALL_TAGS a frozenset
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -21,7 +21,7 @@ requirements = [
"beautifulsoup4>=4.6.3",
"pressurecooker>=0.0.27",
"selenium==3.0.1",
- "youtube-dl",
+ "youtube-dl>=2020.03.24",
"html5lib",
"cachecontrol==0.12.0",
"lockfile==0.12.2",
|
Bump youtube_dl version number; pip was having trouble
|
py
|
diff --git a/openquake/calculators/event_based.py b/openquake/calculators/event_based.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/event_based.py
+++ b/openquake/calculators/event_based.py
@@ -386,7 +386,7 @@ def get_ruptures_by_grp(dstore):
ruptures_by_grp = AccumDict(accum=[])
for grp in dstore['ruptures']:
grp_id = int(grp[4:]) # strip 'grp-'
- for serial in list(dstore['ruptures/' + grp]):
+ for serial in dstore['ruptures/' + grp]:
sr = dstore['ruptures/%s/%s' % (grp, serial)]
ruptures_by_grp[grp_id].append(sr)
return ruptures_by_grp
|
Cleanup [skip CI]
|
py
|
diff --git a/winrm/transport.py b/winrm/transport.py
index <HASH>..<HASH> 100644
--- a/winrm/transport.py
+++ b/winrm/transport.py
@@ -146,6 +146,8 @@ class Transport(object):
session = requests.Session()
session.verify = self.server_cert_validation == 'validate'
+ if session.verify and self.ca_trust_path:
+ session.verify = self.ca_trust_path
# configure proxies from HTTP/HTTPS_PROXY envvars
session.trust_env = True
|
pass ca_trust_path to requests module
|
py
|
diff --git a/geocoder/tamu.py b/geocoder/tamu.py
index <HASH>..<HASH> 100644
--- a/geocoder/tamu.py
+++ b/geocoder/tamu.py
@@ -7,6 +7,7 @@ from __future__ import absolute_import
from geocoder.base import Base
from geocoder.keys import tamu_key
+
class Tamu(Base):
"""
TAMU Geocoding Services
@@ -27,7 +28,9 @@ class Tamu(Base):
provider = 'tamu'
method = 'geocode'
- def __init__(self, location, censusYears=('1990','2000','2010'), **kwargs):
+ def __init__(
+ self, location, censusYears=('1990', '2000', '2010'), **kwargs):
+
# city, state, zip
city = kwargs.get('city', '')
state = kwargs.get('state', '')
@@ -41,7 +44,8 @@ class Tamu(Base):
raise ValueError("Provide key")
self.location = location
- self.url = 'https://geoservices.tamu.edu/Services/Geocode/WebService/GeocoderWebServiceHttpNonParsed_V04_01.aspx'
+ self.url = 'https://geoservices.tamu.edu/Services/Geocode/WebService/'\
+ 'GeocoderWebServiceHttpNonParsed_V04_01.aspx'
self.params = {
'streetAddress': location,
'city': city,
|
make pep8 happy @DenisCarrier not sure if you prefer to break long url or use #noqa?
|
py
|
diff --git a/tests/test_cmd2.py b/tests/test_cmd2.py
index <HASH>..<HASH> 100644
--- a/tests/test_cmd2.py
+++ b/tests/test_cmd2.py
@@ -1288,6 +1288,20 @@ def test_multiline_complete_statement_with_unclosed_quotes(multiline_app):
assert statement.multiline_command == 'orate'
assert statement.terminator == ';'
+def test_multiline_input_line_to_statement(multiline_app):
+ # Verify _input_line_to_statement saves the fully entered input line for multiline commands
+
+ # Mock out the input call so we don't actually wait for a user's response
+ # on stdin when it looks for more input
+ m = mock.MagicMock(name='input', side_effect=['person', '\n'])
+ builtins.input = m
+
+ line = 'orate hi'
+ statement = multiline_app._input_line_to_statement(line)
+ assert statement.raw == 'orate hi\nperson\n'
+ assert statement == 'hi person'
+ assert statement.command == 'orate'
+ assert statement.multiline_command == 'orate'
def test_clipboard_failure(base_app, capsys):
# Force cmd2 clipboard to be disabled
|
Added unit test for _input_line_to_statement
|
py
|
diff --git a/tests/integration/standard/test_connection.py b/tests/integration/standard/test_connection.py
index <HASH>..<HASH> 100644
--- a/tests/integration/standard/test_connection.py
+++ b/tests/integration/standard/test_connection.py
@@ -143,7 +143,7 @@ class HeartbeatTest(unittest.TestCase):
for conn in holders:
if host == str(getattr(conn, 'host', '')):
if isinstance(conn, HostConnectionPool):
- if conn._connections is not None:
+ if conn._connections is not None and len(conn._connections) > 0:
connections.append(conn._connections)
else:
if conn._connection is not None:
@@ -162,7 +162,7 @@ class HeartbeatTest(unittest.TestCase):
def wait_for_no_connections(self, host, cluster):
retry = 0
- while(retry < 200):
+ while(retry < 100):
retry += 1
connections = self.fetch_connections(host, cluster)
if len(connections) is 0:
|
Tweaking heartbeat timeout test to not fail with protocol v2
|
py
|
diff --git a/salt/cloud/utils/__init__.py b/salt/cloud/utils/__init__.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/utils/__init__.py
+++ b/salt/cloud/utils/__init__.py
@@ -550,13 +550,19 @@ def deploy_script(host, port=22, timeout=900, username='root',
root_cmd(subsys_command, tty, sudo, **kwargs)
root_cmd('service sshd restart', tty, sudo, **kwargs)
- #root_cmd('mkdir -p {0}'.format(tmp_dir), tty, sudo, **kwargs)
- #root_cmd('chmod 700 {0}'.format(tmp_dir), tty, sudo, **kwargs)
root_cmd(
'[ ! -d {0} ] && (mkdir -p {0}; chown 700 {0}) || '
'echo "Directory {0!r} already exists..."'.format(tmp_dir),
tty, sudo, **kwargs
)
+ if sudo:
+ comps = tmp_dir.lstrip('/').rstrip('/').split('/')
+ if len(comps) > 0:
+ if len(comps) > 1 or comps[0] == 'tmp':
+ root_cmd(
+ 'chown {0}. {1}'.format(username, tmp_dir),
+ tty, sudo, **kwargs
+ )
# Minion configuration
if minion_pem:
|
Make sure that tmp_dir is owned by sudo user
|
py
|
diff --git a/marathon_acme/cli.py b/marathon_acme/cli.py
index <HASH>..<HASH> 100644
--- a/marathon_acme/cli.py
+++ b/marathon_acme/cli.py
@@ -102,7 +102,7 @@ def main(reactor, raw_args=sys.argv[1:]):
('group', args.group),
('endpoint-description', endpoint_description),
]
- log_args = ["{}='{}'".format(k, v) for k, v in log_args]
+ log_args = ['{}={!r}'.format(k, v) for k, v in log_args]
log.info('Running marathon-acme with: ' + ', '.join(log_args))
return marathon_acme.run(endpoint_description)
|
Tweak startup log format to use repr
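A quick before/after of the two format strings (illustrative values):

```python
log_args = [('group', 'external'), ('endpoint', None)]
print(', '.join("{}='{}'".format(k, v) for k, v in log_args))
# group='external', endpoint='None'   <- None masquerades as the string 'None'
print(', '.join('{}={!r}'.format(k, v) for k, v in log_args))
# group='external', endpoint=None     <- repr quotes strings and leaves None bare
```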
|
py
|
diff --git a/horizon/utils/secret_key.py b/horizon/utils/secret_key.py
index <HASH>..<HASH> 100644
--- a/horizon/utils/secret_key.py
+++ b/horizon/utils/secret_key.py
@@ -13,6 +13,7 @@
# under the License.
+import logging
import os
import random
import string
@@ -32,8 +33,12 @@ def generate_key(key_length=64):
see http://docs.python.org/library/random.html#random.SystemRandom.
"""
if hasattr(random, 'SystemRandom'):
+ logging.info('Generating a secure random key using SystemRandom.')
choice = random.SystemRandom().choice
else:
+ msg = "WARNING: SystemRandom not present. Generating a random "\
+ "key using random.choice (NOT CRYPTOGRAPHICALLY SECURE)."
+ logging.warning(msg)
choice = random.choice
return ''.join(map(lambda x: choice(string.digits + string.ascii_letters),
range(key_length)))
|
Add warning when falling back to insecure key generation. When secret_key.py generates the key, it silently regresses when SystemRandom isn't present. We need the fallback for non-production environments, but we need to warn in environments where SystemRandom isn't being used. See the bug report for more details. Change-Id: Ibed0a<I>d<I>db9bdfa1c9a<I>eb<I>e7 Closes-Bug: <I>
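A standard-library sketch of why the fallback deserves a warning: the module-level generator is reproducible from its seed, while SystemRandom draws from the OS entropy pool and ignores seeding:

```python
import random
import string

alphabet = string.ascii_letters + string.digits
random.seed(0)
a = ''.join(random.choice(alphabet) for _ in range(8))
random.seed(0)
b = ''.join(random.choice(alphabet) for _ in range(8))
assert a == b   # seedable, hence predictable: unfit for secret keys
# random.SystemRandom().choice is backed by os.urandom() and cannot be replayed
```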
|
py
|
diff --git a/spyder/workers/updates.py b/spyder/workers/updates.py
index <HASH>..<HASH> 100644
--- a/spyder/workers/updates.py
+++ b/spyder/workers/updates.py
@@ -56,9 +56,12 @@ class WorkerUpdates(QObject):
if 'dev' in version:
return (False, latest_release)
+ # Filter releases
if is_stable_version(version):
- # Remove non stable versions from the list
releases = [r for r in releases if is_stable_version(r)]
+ else:
+ releases = [r for r in releases
+ if not is_stable_version(r) or r in version]
if github:
latest_release = releases[0]
@@ -107,7 +110,7 @@ class WorkerUpdates(QObject):
if is_anaconda():
releases = []
for item in data['packages']:
- if 'spyder' in item and 'spyder-kernels' not in item:
+ if 'spyder' in item and 'spyder-' not in item:
releases.append(item.split('-')[1])
result = self.check_update_available(self.version,
releases)
|
Updates: Try to work with updates for beta versions
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ if sys.version_info[:2] < (2, 7):
setup(
name='django-local-settings',
- version='1.0a9',
+ version='1.0a10',
author='Wyatt Baldwin',
author_email='[email protected]',
url='https://github.com/PSU-OIT-ARC/django-local-settings',
|
Prepare release <I>a<I>
|
py
|
diff --git a/tests/schemas.py b/tests/schemas.py
index <HASH>..<HASH> 100644
--- a/tests/schemas.py
+++ b/tests/schemas.py
@@ -2,8 +2,9 @@ from marshmallow import Schema, fields
class PetSchema(Schema):
- id = fields.Int(dump_only=True)
- name = fields.Str()
+ description = dict(id="Pet id", name="Pet name")
+ id = fields.Int(dump_only=True, description=description['id'])
+ name = fields.Str(description=description['name'], required=True, deprecated=False, allowEmptyValue=False)
class SampleSchema(Schema):
|
test(schemas): add additional fields in PetSchema for tests. The deprecated and allowEmptyValue keywords are not supported yet. A better approach may be to provide a different schema2fields function for each version.
|
py
|
diff --git a/nes_py/_app/play_human.py b/nes_py/_app/play_human.py
index <HASH>..<HASH> 100644
--- a/nes_py/_app/play_human.py
+++ b/nes_py/_app/play_human.py
@@ -78,7 +78,9 @@ def play(env, transpose=True, fps=30, callback=None, plot_reward=False, nop_=0):
running = True
env_done = True
# setup the screen using pygame
- screen = pygame.display.set_mode(video_size)
+ flags = pygame.RESIZABLE | pygame.HWSURFACE | pygame.DOUBLEBUF
+ screen = pygame.display.set_mode(video_size, flags)
+ pygame.event.set_blocked(pygame.MOUSEMOTION)
# set the caption for the pygame window. if the env has a spec use its id
if env.spec is not None:
pygame.display.set_caption(env.spec.id)
|
allow resizable play window
|
py
|
diff --git a/salt/cloud/clouds/vsphere.py b/salt/cloud/clouds/vsphere.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/vsphere.py
+++ b/salt/cloud/clouds/vsphere.py
@@ -367,6 +367,9 @@ def _deploy(vm_):
),
'sudo_password': config.get_cloud_config_value(
'sudo_password', vm_, __opts__, default=None
+ ),
+ 'key_filename': config.get_cloud_config_value(
+ 'key_filename', vm_, __opts__, default=None
)
}
|
adding key_filename param to vsphere provider
|
py
|
diff --git a/networking_arista/ml2/mechanism_arista.py b/networking_arista/ml2/mechanism_arista.py
index <HASH>..<HASH> 100644
--- a/networking_arista/ml2/mechanism_arista.py
+++ b/networking_arista/ml2/mechanism_arista.py
@@ -396,7 +396,10 @@ class AristaDriver(driver_api.MechanismDriver):
If this port was the last port using a segment and the segment was
allocated by this driver, it should be released
"""
- binding_levels = context.binding_levels
+ if migration:
+ binding_levels = context.original_binding_levels
+ else:
+ binding_levels = context.binding_levels
LOG.debug("_try_release_dynamic_segment: "
"binding_levels=%(bl)s", {'bl': binding_levels})
if not binding_levels:
|
Use original_binding_levels when current binding_levels is not set The binding_levels may be cleared when update_port is called (e.g. in case of deleting a baremetal instance). In such a case, original_binding_levels can be used to get required binding levels. Change-Id: Ibab2c9d<I>c<I>ce0b9d<I>fdafaf0aadf8c9
|
py
|
diff --git a/route.py b/route.py
index <HASH>..<HASH> 100644
--- a/route.py
+++ b/route.py
@@ -107,9 +107,10 @@ def validate(command, func):
def unpack(prefix, command, params, message):
try:
route = get_route(command)
+ return route.command.upper(), route.unpack(prefix, params, message)
except ValueError:
- logger.debug("---UNPACK--- {} {} {} {}".format(prefix, command, params, message))
- return route.command.upper(), route.unpack(prefix, params, message)
+ logger.info("---UNPACK--- {} {} {} {}".format(prefix, command, params, message))
+ return command.upper(), {}
def register(route):
|
Temporarily suppress unpack errors in route
|
py
|
diff --git a/photutils/segmentation/tests/test_deblend.py b/photutils/segmentation/tests/test_deblend.py
index <HASH>..<HASH> 100644
--- a/photutils/segmentation/tests/test_deblend.py
+++ b/photutils/segmentation/tests/test_deblend.py
@@ -206,3 +206,9 @@ class TestDeblendSources:
with catch_warnings(NoDetectionsWarning) as warning_lines:
deblend_sources(data, self.segm, self.npixels)
assert len(warning_lines) == 0
+
+ def test_nonconsecutive_labels(self):
+ segm = self.segm.copy()
+ segm.reassign_label(1, 1000)
+ result = deblend_sources(self.data, segm, self.npixels)
+ assert result.nlabels == 2
|
Add test for deblend_sources
|
py
|
diff --git a/juju/model.py b/juju/model.py
index <HASH>..<HASH> 100644
--- a/juju/model.py
+++ b/juju/model.py
@@ -1521,7 +1521,7 @@ class Model:
if raw:
return result_status
-
+
result_str = self._print_status_model(result_status)
result_str += '\n'
result_str += self._print_status_apps(result_status)
@@ -1560,7 +1560,7 @@ class Model:
apps = result_status.applications
if apps is None or len(apps) == 0:
return ''
-
+
limits = '{:<25} {:<10} {:<10} {:<5} {:<20} {:<8}'
# print header
result_str = limits.format(
@@ -1597,12 +1597,12 @@ class Model:
addr = unit.public_address
if addr is None:
addr = ''
-
+
if unit.opened_ports is None:
opened_ports = ''
else:
opened_ports = ','.join(unit.opened_ports)
-
+
info = unit.workload_status.info
if info is None:
info = ''
|
Changes for lint.
|
py
|
diff --git a/librosa/core/constantq.py b/librosa/core/constantq.py
index <HASH>..<HASH> 100644
--- a/librosa/core/constantq.py
+++ b/librosa/core/constantq.py
@@ -124,10 +124,10 @@ def cqt(y, sr=22050, hop_length=512, fmin=None, n_bins=84,
tuning = estimate_tuning(y=y, sr=sr)
# First thing, get the fmin of the top octave
- freqs = time_frequency.cqt_frequencies(n_bins + 1, fmin,
+ freqs = time_frequency.cqt_frequencies(n_bins, fmin,
bins_per_octave=bins_per_octave)
- fmin_top = freqs[-bins_per_octave-1]
+ fmin_top = freqs[-bins_per_octave]
# Generate the basis filters
basis, lengths = filters.constant_q(sr,
|
no reason to generate an extra cqt frequency
|
py
|
diff --git a/synapse/tests/test_cryotank.py b/synapse/tests/test_cryotank.py
index <HASH>..<HASH> 100644
--- a/synapse/tests/test_cryotank.py
+++ b/synapse/tests/test_cryotank.py
@@ -230,3 +230,12 @@ class CryoTest(SynTest):
# ensure dmon cell processes are fini'd
for celldir, proc in dmon.cellprocs.items():
self.false(proc.is_alive())
+
+ def test_cryo_cryouser_timeout(self):
+ with self.getTestDir() as dirn:
+ conf = {'host': '127.0.0.1'}
+ with s_cryotank.CryoCell(dirn, conf) as cell:
+ port = cell.getCellPort()
+ auth = cell.genUserAuth('[email protected]')
+ addr = ('127.0.0.1', port)
+ self.raises(CellUserErr, s_cryotank.CryoUser, auth, addr, timeout=-1)
|
Adds test for timing out while starting a CryoUser session
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,6 @@ setup(name='mmpy',
package_dir={'mmpy':'src'},
provides=['mmpy'],
license='ISCL',
- requires=['simplejson'],
install_requires=['simplejson'],
classifiers=['Development Status :: 3 - Alpha',
'License :: OSI Approved :: ISC License (ISCL)',
|
remove the requires= keyword per @mattjeffery's suggestion
|
py
|
diff --git a/actstream/models.py b/actstream/models.py
index <HASH>..<HASH> 100644
--- a/actstream/models.py
+++ b/actstream/models.py
@@ -179,9 +179,3 @@ model_stream = Action.objects.model_actions
any_stream = Action.objects.any
followers = Follow.objects.followers
following = Follow.objects.following
-
-
-if django.VERSION[:2] < (1, 7):
- from actstream.apps import ActstreamConfig
-
- ActstreamConfig().ready()
|
remove less-than-<I> monkey patch from models
|
py
|
diff --git a/see.py b/see.py
index <HASH>..<HASH> 100644
--- a/see.py
+++ b/see.py
@@ -68,8 +68,8 @@ def see(obj):
('__pow__', '**'),
('__ipow__', '**='),
('__lshift__', '<<'),
- ('__rshift__', '>>'),
('__ilshift__', '<<='),
+ ('__rshift__', '>>'),
('__irshift__', '>>='),
('__and__', '&'),
('__iand__', '&='),
|
swapped positions of <<= and >>
|
py
|
diff --git a/mongu.py b/mongu.py
index <HASH>..<HASH> 100644
--- a/mongu.py
+++ b/mongu.py
@@ -72,8 +72,9 @@ class Model(ObjectDict):
def by_id(cls, oid):
"""Find a model object by its ``ObjectId``,
``oid`` can be string or ObjectId"""
- if oid:
- return cls.from_dict(cls.collection.find_one(ObjectId(oid)))
+ d = cls.collection.find_one(ObjectId(oid))
+ if d:
+ return cls.from_dict(d)
@classmethod
def from_dict(cls, d):
|
fix bug where `by_id` could not return None
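A sketch of the guard with pymongo (the database and collection names here are hypothetical):

```python
from pymongo import MongoClient
from bson import ObjectId

collection = MongoClient().appdb.models   # hypothetical names

def by_id(oid):
    d = collection.find_one(ObjectId(oid))   # None when nothing matches
    if d:
        return d
    # falls through to an implicit None; the old code fed None to from_dict()
```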
|
py
|
diff --git a/test/test_search_server_coarse.py b/test/test_search_server_coarse.py
index <HASH>..<HASH> 100644
--- a/test/test_search_server_coarse.py
+++ b/test/test_search_server_coarse.py
@@ -54,7 +54,6 @@ class SearchServerTestCaseCoarse(unittest.TestCase):
def test_coarse_6(self):
results_6 = self.helper_test_coarse(6)
self.assertEquals(len(results_6), 1)
- self.assertEquals(len(results_6[0]["result"]["hits"]["hits"]), 1)
self.assertEquals(results_6[0]["result"]["aggregations"]["?ethnicity"]["buckets"][0]["doc_count"], 1)
if __name__ == '__main__':
|
Fix bad test that checks for hits in agg query
|
py
|
diff --git a/shinken/webui/plugins/depgraph/depgraph.py b/shinken/webui/plugins/depgraph/depgraph.py
index <HASH>..<HASH> 100644
--- a/shinken/webui/plugins/depgraph/depgraph.py
+++ b/shinken/webui/plugins/depgraph/depgraph.py
@@ -79,7 +79,12 @@ def get_depgraph_widget():
search = app.request.GET.get('search', '').strip()
if not search:
- search = 'localhost'
+ # Ok look for the first host we can find
+ hosts = app.datamgr.get_hosts()
+ for h in hosts:
+ search = h.get_name()
+ break
+
elts = search.split('/', 1)
if len(elts) == 1:
|
Fix: the webui depgraph widget was buggy when there was no localhost object, so it now takes the first host found.
|
py
|
diff --git a/hwt/interfaces/agents/handshaked.py b/hwt/interfaces/agents/handshaked.py
index <HASH>..<HASH> 100644
--- a/hwt/interfaces/agents/handshaked.py
+++ b/hwt/interfaces/agents/handshaked.py
@@ -92,4 +92,4 @@ class HandshakeSyncAgent(HandshakedAgent):
pass
def doRead(self, s):
- raise NotImplementedError()
+ return s.now
|
read time in monitor of HandshakeSyncAgent
|
py
|
diff --git a/vtki/plotting.py b/vtki/plotting.py
index <HASH>..<HASH> 100644
--- a/vtki/plotting.py
+++ b/vtki/plotting.py
@@ -54,6 +54,7 @@ rcParams = {
'position_y' : 0.02,
},
'show_edges' : False,
+ 'lighting': True,
}
def set_plot_theme(theme):
@@ -438,7 +439,7 @@ class BasePlotter(object):
def add_mesh(self, mesh, color=None, style=None,
scalars=None, rng=None, stitle=None, show_edges=None,
point_size=5.0, opacity=1, line_width=None, flip_scalars=False,
- lighting=True, n_colors=256, interpolate_before_map=False,
+ lighting=None, n_colors=256, interpolate_before_map=False,
cmap=None, label=None, reset_camera=None, scalar_bar_args=None,
multi_colors=False, name=None, texture=None,
render_points_as_spheres=False, render_lines_as_tubes=False,
@@ -553,6 +554,9 @@ class BasePlotter(object):
if show_edges is None:
show_edges = rcParams['show_edges']
+ if lighting is None:
+ lighting = rcParams['lighting']
+
if name is None:
name = '{}({})'.format(type(mesh).__name__, str(hex(id(mesh))))
|
Add rcParam for lighting
|
py
|
diff --git a/JSAnimation/examples.py b/JSAnimation/examples.py
index <HASH>..<HASH> 100644
--- a/JSAnimation/examples.py
+++ b/JSAnimation/examples.py
@@ -30,8 +30,9 @@ def lorenz_animation(N_trajectories=20, rseed=1, frames=200, interval=30):
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.colors import cnames
- def lorentz_deriv((x, y, z), t0, sigma=10., beta=8./3, rho=28.0):
+ def lorentz_deriv(coords, t0, sigma=10., beta=8./3, rho=28.0):
"""Compute the time-derivative of a Lorentz system."""
+ x, y, z = coords
return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]
# Choose random starting points, uniformly distributed from -15 to 15
|
[py3k] removed use of tuple unpacking (see PEP <I>)
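A sketch of the rewrite: Python 3 dropped tuple parameters, so the function unpacks in its body instead:

```python
# Python 2 allowed: def lorentz_deriv((x, y, z), t0, ...)  -- SyntaxError on Python 3
def lorentz_deriv(coords, t0, sigma=10., beta=8. / 3, rho=28.0):
    x, y, z = coords
    return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

print(lorentz_deriv((1.0, 2.0, 3.0), 0.0))
```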
|
py
|
diff --git a/openquake/qa_tests_data/event_based/case_8/__init__.py b/openquake/qa_tests_data/event_based/case_8/__init__.py
index <HASH>..<HASH> 100644
--- a/openquake/qa_tests_data/event_based/case_8/__init__.py
+++ b/openquake/qa_tests_data/event_based/case_8/__init__.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
-# Copyright (C) 2014-2016 GEM Foundation
+# Copyright (C) 2014-2017 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
@@ -15,4 +15,3 @@
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
-
|
Fixed copyright [skip CI]
|
py
|
diff --git a/salt/modules/kubernetes.py b/salt/modules/kubernetes.py
index <HASH>..<HASH> 100644
--- a/salt/modules/kubernetes.py
+++ b/salt/modules/kubernetes.py
@@ -49,6 +49,7 @@ from salt.exceptions import CommandExecutionError
from salt.ext.six import iteritems
import salt.utils
import salt.utils.templates
+from salt.ext.six.moves import range # pylint: disable=import-error
try:
import kubernetes # pylint: disable=import-self
@@ -719,9 +720,9 @@ def delete_deployment(name, namespace='default', **kwargs):
if not platform.startswith("win"):
try:
with _time_limit(POLLING_TIME_LIMIT):
- while show_deployment(name, namespace) is not None: # pylint: disable=useless-else-on-loop
+ while show_deployment(name, namespace) is not None:
sleep(1)
- else:
+ else: # pylint: disable=useless-else-on-loop
mutable_api_response['code'] = 200
except TimeoutException:
pass
|
Added pylint-disable statements and import for salt.ext.six.moves.range
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup
setup(
name='sphinx-git',
description='git Changelog for Sphinx',
- version='5',
+ version='6',
author='Daniel Watkins',
author_email='[email protected]',
install_requires=['sphinx', 'GitPython'],
|
Bump to v6 (for development).
|
py
|
diff --git a/astrobase/varclass/fakelcrecovery.py b/astrobase/varclass/fakelcrecovery.py
index <HASH>..<HASH> 100644
--- a/astrobase/varclass/fakelcrecovery.py
+++ b/astrobase/varclass/fakelcrecovery.py
@@ -986,9 +986,11 @@ def get_recovered_variables_for_magbin(simbasedir,
# this is the index of the matching magnitude bin for the magbinmedian
# provided
magbinind = np.where(
- varthresh[magcol]['binned_sdssr_median'] == magbinmedian
+ np.array(varthresh[magcol]['binned_sdssr_median']) == magbinmedian
)
+ magbinind = np.asscalar(magbinind[0])
+
# get the objectids, actual vars and actual notvars in this magbin
thisbin_objectids = binned_objectids[magbinind]
thisbin_actualvars = binned_actualvars[magbinind]
|
fakelcrecovery: working on per magbin recovery stats
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,7 +18,7 @@ setup(
url = "https://github.com/Baguage/django-auth-pubtkt",
packages=['django_auth_pubtkt', 'tests'],
long_description=read('README'),
- install_requires=[" M2Crypto"],
+ install_requires=[" M2Crypto", "Django >= 1.3",],
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
|
Added Django as a dependency
|
py
|
diff --git a/openquake/hazardlib/__init__.py b/openquake/hazardlib/__init__.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/__init__.py
+++ b/openquake/hazardlib/__init__.py
@@ -26,5 +26,5 @@ from openquake.hazardlib import (
tom, near_fault)
# the version is managed by packager.sh with a sed
-__version__ = '0.19.0'
+__version__ = '0.20.0'
__version__ += git_suffix(__file__)
|
update development version to <I>
|
py
|
diff --git a/pymola/backends/casadi/api.py b/pymola/backends/casadi/api.py
index <HASH>..<HASH> 100644
--- a/pymola/backends/casadi/api.py
+++ b/pymola/backends/casadi/api.py
@@ -125,16 +125,11 @@ def _save_model(model_folder: str, model_name: str, model: Model):
library_name = '{}_{}'.format(model_name, o)
cg = ca.CodeGenerator(library_name)
-
- # Create temporary variables to keep the functions in scope
- # cfr casadi bug #2059
- fr = f.reverse(1)
- ff = f.forward(1)
- frf = fr.forward(1)
- cg.add(f)
- cg.add(ff)
- cg.add(fr)
- cg.add(frf)
+ cg.add(f) # Nondifferentiated function
+ cg.add(f.forward(1)) # Jacobian-times-vector product
+ cg.add(f.reverse(1)) # vector-times-Jacobian product
+ cg.add(f.reverse(1).forward(1)) # Hessian-times-vector product
+ cg.add(f.jacobian()) # Jacobian
cg.generate(model_folder + '/')
compiler = distutils.ccompiler.new_compiler()
|
Included Jacobian information in generated code. Note that this commit requires CasADi at revision <I>fd4c or newer (Sep <I>, <I>).
|
py
|
diff --git a/nationstates/NSback/__init__.py b/nationstates/NSback/__init__.py
index <HASH>..<HASH> 100644
--- a/nationstates/NSback/__init__.py
+++ b/nationstates/NSback/__init__.py
@@ -1,7 +1,7 @@
import requests
-try:
+if __name__ == "__main__":
import bs4parser
-except:
+else:
from . import bs4parser
|
Removed Try/except from nationstates/NSback/__init__.py
|
py
|
diff --git a/bypy.py b/bypy.py
index <HASH>..<HASH> 100755
--- a/bypy.py
+++ b/bypy.py
@@ -1566,11 +1566,14 @@ class ByPy(object):
def __get_json(self, r, defaultec = ERequestFailed):
try:
j = r.json()
+ self.pd("Website returned JSON: {}".format(j))
if 'error_code' in j:
return j['error_code']
else:
return defaultec
except ValueError:
+ if hasattr(r, 'text'):
+ self.pd("Website Response: {}".format(rb(r.text)))
return defaultec
def __request_work_die(self, ex, url, pars, r, act):
|
Trivial: Add in more printing for JSON response
|
py
|
diff --git a/ishell/console.py b/ishell/console.py
index <HASH>..<HASH> 100644
--- a/ishell/console.py
+++ b/ishell/console.py
@@ -100,7 +100,8 @@ class Console:
break
else:
self.walk_and_run(input_)
- except KeyboardInterrupt:
+ except (KeyboardInterrupt, EOFError):
+ print "exit"
break
except Exception:
|
Fixes #9, print 'exit' and exit cleanly on KeyboardInterrupt or EOFError
|
py
|
diff --git a/bika/lims/content/sample.py b/bika/lims/content/sample.py
index <HASH>..<HASH> 100644
--- a/bika/lims/content/sample.py
+++ b/bika/lims/content/sample.py
@@ -333,7 +333,7 @@ class Sample(BaseFolder, HistoryAwareMixin):
ar_ids = [AR.id for AR in ARs if AR.id.startswith(prefix)]
ar_ids.sort()
try:
- last_ar_number = int(ar_ids[-1].split("-")[-1])
+ last_ar_number = int(ar_ids[-1].split("-R")[-1])
except:
return 0
return last_ar_number
|
Secondary AR ID generation handles new AR ids
|
py
|
diff --git a/chess/gaviota.py b/chess/gaviota.py
index <HASH>..<HASH> 100644
--- a/chess/gaviota.py
+++ b/chess/gaviota.py
@@ -131,7 +131,7 @@ class NativeTablebases(object):
pliestomate = ctypes.c_uint()
if not wdl_only:
ret = self.libgtb.tb_probe_hard(stm, ep_square, castling, c_ws, c_bs, c_wp, c_bp, ctypes.byref(info), ctypes.byref(pliestomate))
- dtm = pliestomate.value
+ dtm = int(pliestomate.value)
else:
ret = self.libgtb.tb_probe_WDL_hard(stm, ep_square, castling, c_ws, c_bs, c_wp, c_bp, ctypes.byref(info))
dtm = 1
|
Python 2: Never return longs as DTMs
|
py
|
diff --git a/api/models.py b/api/models.py
index <HASH>..<HASH> 100644
--- a/api/models.py
+++ b/api/models.py
@@ -188,10 +188,9 @@ class Formation(UuidAuditedModel):
return
def destroy(self, *args, **kwargs):
- app_tasks = [tasks.destroy_app.si(a) for a in self.app_set.all()]
node_tasks = [tasks.destroy_node.si(n) for n in self.node_set.all()]
layer_tasks = [tasks.destroy_layer.si(l) for l in self.layer_set.all()]
- group(app_tasks + node_tasks).apply_async().join()
+ group(node_tasks).apply_async().join()
group(layer_tasks).apply_async().join()
CM.purge_formation(self.flat())
self.delete()
|
Removed ref to obsolete app_tasks in formation.destroy.
|
py
|
diff --git a/openfisca_web_api/controllers.py b/openfisca_web_api/controllers.py
index <HASH>..<HASH> 100644
--- a/openfisca_web_api/controllers.py
+++ b/openfisca_web_api/controllers.py
@@ -26,9 +26,12 @@
"""Root controllers"""
+from __future__ import division
+
import collections
import copy
import datetime
+import multiprocessing
import os
import xml.etree
@@ -38,6 +41,7 @@ from openfisca_core import decompositions, decompositionsxml, legislations, simu
from . import conf, contexts, conv, urls, wsgihelpers
+cpu_count = multiprocessing.cpu_count()
N_ = lambda message: message
router = None
@@ -268,7 +272,7 @@ def api1_simulate(req):
# When load average is not available, always accept request.
pass
else:
- if load_average[0] > 0.75:
+ if load_average[0] / cpu_count > 0.75:
return wsgihelpers.respond_json(ctx,
collections.OrderedDict(sorted(dict(
apiVersion = '1.0',
|
Use CPU count to measure server load.
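A minimal sketch of the normalisation (POSIX-only `os.getloadavg`):

```python
import multiprocessing
import os

cpu_count = multiprocessing.cpu_count()
load_average = os.getloadavg()             # 1-, 5- and 15-minute load averages
# a raw 0.75 cutoff would reject traffic on a 16-core box at 5% utilisation;
# dividing by the core count turns the load into a per-CPU ratio
overloaded = load_average[0] / cpu_count > 0.75
```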
|
py
|
diff --git a/src/hupper/reloader.py b/src/hupper/reloader.py
index <HASH>..<HASH> 100644
--- a/src/hupper/reloader.py
+++ b/src/hupper/reloader.py
@@ -167,6 +167,11 @@ class Reloader(object):
pass
else:
self.monitor.add_path(path)
+ except KeyboardInterrupt:
+ if self.worker.is_alive():
+ self.out('Waiting for server to exit ...')
+ time.sleep(self.reload_interval)
+ raise
finally:
if self.worker.is_alive():
self.out('Killing server with PID %s.' % self.worker.pid)
|
add a delay before killing the server due to a SIGINT; fixes #<I>
|
py
|
diff --git a/split_folders/split.py b/split_folders/split.py
index <HASH>..<HASH> 100644
--- a/split_folders/split.py
+++ b/split_folders/split.py
@@ -130,7 +130,7 @@ def split_files(files, split_train, split_val, use_test):
"""Splits the files along the provided indices
"""
files_train = files[:split_train]
- files_val = files[split_train:split_val]
+ files_val = files[split_train:split_val] if use_test else files[split_train:]
li = [(files_train, 'train'), (files_val, 'val')]
|
fix for 1 file going missing every time ;)
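A sketch of the off-by-one (hypothetical indices):

```python
files = list(range(10))
split_train, split_val = 7, 9
use_test = False

files_train = files[:split_train]
# old: files[split_train:split_val] stopped at split_val even with no test
# split requested, silently dropping files[split_val:]
files_val = files[split_train:split_val] if use_test else files[split_train:]
assert len(files_train) + len(files_val) == len(files)
```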
|
py
|
diff --git a/tests/float/cmath_fun_special.py b/tests/float/cmath_fun_special.py
index <HASH>..<HASH> 100644
--- a/tests/float/cmath_fun_special.py
+++ b/tests/float/cmath_fun_special.py
@@ -29,4 +29,4 @@ for f_name, f, test_vals in functions:
print(f_name)
for val in test_vals:
ret = f(val)
- print("complex(%.5g, %.5g)" % (ret.real, ret.imag))
+ print("complex(%.4g, %.4g)" % (ret.real, ret.imag))
|
tests/float: Fix cmath_fun_special for MICROPY_FLOAT_IMPL_FLOAT. When the unix and windows ports use MICROPY_FLOAT_IMPL_FLOAT instead of MICROPY_FLOAT_IMPL_DOUBLE, the test output has for example complex(-<I>, <I>) instead of the expected complex(-<I>, <I>). Use one decimal place less for the output printing to fix this.
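An illustration with made-up values of why one fewer digit aligns the two builds:

```python
# hypothetical: a float32 build and a float64 build disagree in the 5th digit
ret_single, ret_double = -0.83229, -0.83230
print("%.5g %.5g" % (ret_single, ret_double))   # -0.83229 -0.8323  (mismatch)
print("%.4g %.4g" % (ret_single, ret_double))   # -0.8323 -0.8323   (agree)
```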
|
py
|
diff --git a/test/test_http_client.py b/test/test_http_client.py
index <HASH>..<HASH> 100644
--- a/test/test_http_client.py
+++ b/test/test_http_client.py
@@ -47,6 +47,10 @@ class Test_Alignak_Http_Client(unittest.TestCase):
self.__thread.join(15)
if self.__thread.isAlive():
print("warn: http thread still alive", file=sys.stderr)
+ try:
+ self.__thread._Thread__stop()
+ except Exception:
+ pass
self.__thread = None
self.__server = None
|
Enh: Test - Try to shut down thread
|
py
|
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index <HASH>..<HASH> 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -171,15 +171,15 @@ class AbstractYAMLReader(six.with_metaclass(ABCMeta, object)):
def select_files_from_pathnames(self, filenames):
"""Select the files from *filenames* this reader can handle."""
- filenames = []
+ selected_filenames = []
for pattern in self.file_patterns:
matching = match_filenames(filenames, pattern)
- filenames.extend(matching)
- if len(filenames) == 0:
+ selected_filenames.extend(matching)
+ if len(selected_filenames) == 0:
logger.warning("No filenames found for reader: %s", self.name)
- return filenames
+ return selected_filenames
def get_dataset_key(self,
key,
|
Fix bug in FileYAMLReader when filenames are provided
|
py
|
diff --git a/salt/engines/stalekey.py b/salt/engines/stalekey.py
index <HASH>..<HASH> 100644
--- a/salt/engines/stalekey.py
+++ b/salt/engines/stalekey.py
@@ -40,6 +40,7 @@ log = logging.getLogger(__name__)
def __virtual__():
if not __opts__.get('minion_data_cache'):
return (False, 'stalekey engine requires minion_data_cache to be enabled')
+ return True
def _get_keys():
|
Stop returning `None` Prevents: ``` <I>:<I>:<I>,<I> [salt.loader :<I>][WARNING ] salt.loaded.int.engines.stalekey.__virtual__() is wrongly returning `None`. It should either return `True`, `False` or a new name. If you're the developer of the module 'stalekey', please fix this. ```
|
py
|
diff --git a/telethon/client/auth.py b/telethon/client/auth.py
index <HASH>..<HASH> 100644
--- a/telethon/client/auth.py
+++ b/telethon/client/auth.py
@@ -155,7 +155,7 @@ class AuthMethods:
'not login to the bot account using the provided '
'bot_token (it may not be using the user you expect)'
)
- elif not callable(phone) and phone != me.phone:
+ elif phone and not callable(phone) and utils.parse_phone(phone) != me.phone:
warnings.warn(
'the session already had an authorized user so it did '
'not login to the user account using the provided '
|
Fix warning when using formatted phones in start (#<I>)
|
py
|
diff --git a/netmiko/ssh_dispatcher.py b/netmiko/ssh_dispatcher.py
index <HASH>..<HASH> 100644
--- a/netmiko/ssh_dispatcher.py
+++ b/netmiko/ssh_dispatcher.py
@@ -110,12 +110,12 @@ CLASS_MAPPER_BASE = {
}
FILE_TRANSFER_MAP = {
- 'arista_eos': AristaFileTransfer,
+# 'arista_eos': AristaFileTransfer,
'cisco_asa': CiscoAsaFileTransfer,
'cisco_ios': CiscoIosFileTransfer,
'cisco_xe': CiscoIosFileTransfer,
'cisco_nxos': CiscoNxosFileTransfer,
- 'juniper_junos': JuniperFileTransfer,
+# 'juniper_junos': JuniperFileTransfer,
}
# Also support keys that end in _ssh
|
Arista and Juniper SCP is not fully implemented
|
py
|
diff --git a/src/techbubbleiotjumpwaymqtt/application.py b/src/techbubbleiotjumpwaymqtt/application.py
index <HASH>..<HASH> 100644
--- a/src/techbubbleiotjumpwaymqtt/application.py
+++ b/src/techbubbleiotjumpwaymqtt/application.py
@@ -149,7 +149,7 @@ class JumpWayPythonMQTTApplicationConnection():
return False
else:
deviceWarningTopic = '%s/Devices/%s/%s/Warnings' % (self._configs['locationID'], zoneID, deviceID)
- self.mqttClient.subscribe(deviceDataTopic, qos=qos)
+ self.mqttClient.subscribe(deviceWarningTopic, qos=qos)
print("Subscribed to Device Warnings " + deviceWarningTopic)
return True
|
Update application.py. Line <I>: changed var name from "deviceDataTopic" to "deviceWarningTopic".
|
py
|
diff --git a/tweepy/streaming.py b/tweepy/streaming.py
index <HASH>..<HASH> 100644
--- a/tweepy/streaming.py
+++ b/tweepy/streaming.py
@@ -153,8 +153,10 @@ class Stream(object):
delimited_string = c
# read rest of delimiter length..
+ d = ''
while d != '\n' and self.running and not resp.isclosed():
- delimited_string += resp.read(1)
+ d = resp.read(1)
+ delimited_string += d
# read the next twitter status object
if delimited_string.isdigit():
|
Fix the delimiter read loop in tweepy/streaming.py: initialize `d` before the loop and update it on each read so the terminating newline is detected
|
py
|
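Before the fix, the `while` condition read `d` without it ever being (re)initialized, and `resp.read(1)` was appended without updating `d`, so the loop could never observe the newline. A self-contained sketch of the repaired loop:

```python
import io

def read_delimiter(stream):
    delimited_string = stream.read(1)  # first digit, as in the original code
    d = ''                             # initialize before the condition reads it
    while d != '\n':
        d = stream.read(1)             # advance the sentinel every iteration
        if not d:                      # EOF guard (an addition, not in the diff)
            break
        delimited_string += d
    return delimited_string.strip()

assert read_delimiter(io.StringIO('123\n{"status": 1}')) == '123'
```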
diff --git a/pykeepass/entry.py b/pykeepass/entry.py
index <HASH>..<HASH> 100644
--- a/pykeepass/entry.py
+++ b/pykeepass/entry.py
@@ -34,17 +34,17 @@ class Entry(BaseElement):
password = xmlfactory.create_password_element(password)
times = xmlfactory.create_times_element(expires, expiration)
if url:
- url = xmlfactory.create_url_element(url)
- element.append(url)
+ url_el = xmlfactory.create_url_element(url)
+ element.append(url_el)
if notes:
- notes = xmlfactory.create_notes_element(notes)
- element.append(notes)
+ notes_el = xmlfactory.create_notes_element(notes)
+ element.append(notes_el)
if tags:
- tags = xmlfactory.create_tags_element(tags)
- element.append(tags)
+ tags_el = xmlfactory.create_tags_element(tags)
+ element.append(tags_el)
if icon:
- icon = xmlfactory.create_icon_element(icon)
- element.append(icon)
+ icon_el = xmlfactory.create_icon_element(icon)
+ element.append(icon_el)
element.append(title)
element.append(uuid)
element.append(username)
|
Don't overwrite argument variables
|
py
|
diff --git a/tests/test.py b/tests/test.py
index <HASH>..<HASH> 100755
--- a/tests/test.py
+++ b/tests/test.py
@@ -12,13 +12,7 @@ sys.path.insert(0, pkg_root)
import dss # noqa
-class TestRequest:
- def call(self, method, path, json={}, headers={}, **kwargs):
- headers = [(k, v) for k, v in headers.items()]
- return self.app.open(path, method=method, headers=headers, data=json.dumps(json),
- content_type="application/json", **kwargs)
-
-class TestDSS(unittest.TestCase, TestRequest):
+class TestDSS(unittest.TestCase):
def setUp(self):
self.app = dss.create_app().app.test_client()
|
Remove unused test fixture (#<I>)
|
py
|
diff --git a/tests/test_alldiff.py b/tests/test_alldiff.py
index <HASH>..<HASH> 100644
--- a/tests/test_alldiff.py
+++ b/tests/test_alldiff.py
@@ -1,8 +1,12 @@
-import networkx
-from networkx.algorithms import bipartite
-from pyscipopt import Model, Conshdlr, SCIP_RESULT, SCIP_PARAMEMPHASIS, SCIP_PARAMSETTING
-from types import SimpleNamespace
-import matplotlib.pyplot as plt
+try:
+ import networkx
+ from networkx.algorithms import bipartite
+ from pyscipopt import Model, Conshdlr, SCIP_RESULT, SCIP_PARAMEMPHASIS, SCIP_PARAMSETTING
+ from types import SimpleNamespace
+ import matplotlib.pyplot as plt
+except:
+ import pytest
+ pytest.skip()
#initial Sudoku values
init = [5, 3, 0, 0, 7, 0, 0, 0, 0,
|
skip failing "test" for alldiff example
|
py
|
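A note on the guard above: a bare `pytest.skip()` at module level raises an error in modern pytest unless called with `allow_module_level=True`, and the bare `except:` swallows unrelated failures too. A tighter sketch of the same intent:

```python
import pytest

# importorskip skips the whole module cleanly when a dependency is absent
networkx = pytest.importorskip('networkx')
pyscipopt = pytest.importorskip('pyscipopt')
plt = pytest.importorskip('matplotlib.pyplot')
```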
diff --git a/bin/summit.py b/bin/summit.py
index <HASH>..<HASH> 100755
--- a/bin/summit.py
+++ b/bin/summit.py
@@ -325,6 +325,10 @@ def event(options, session):
print_exception(action, e)
sys.exit(1)
+ print_status(action, r, options)
+ print_response(r, options)
+ exit_for_http_status(r)
+
def ack(api, options):
action = inspect.stack()[0][3]
k = 'EventService:ack'
|
Add the missing print and exit calls after event poll().
|
py
|
diff --git a/src/parse_mapqtl_file.py b/src/parse_mapqtl_file.py
index <HASH>..<HASH> 100644
--- a/src/parse_mapqtl_file.py
+++ b/src/parse_mapqtl_file.py
@@ -26,9 +26,11 @@
import logging
import os
try:
- from pymq2 import read_input_file, MQ2Exception, MQ2NoMatrixException
+ from pymq2 import (read_input_file, MQ2Exception,
+ MQ2NoMatrixException, MQ2NoSuchSessionException)
except ImportError:
- from src import read_input_file, MQ2Exception, MQ2NoMatrixException
+ from src import (read_input_file, MQ2Exception,
+ MQ2NoMatrixException, MQ2NoSuchSessionException)
LOG = logging.getLogger('pymq2')
@@ -164,7 +166,8 @@ def parse_mapqtl_file(inputfolder, sessionid, lodthreshold=3,
filelist = get_files_to_read(inputfolder, sessionid)
if not filelist:
- raise MQ2Exception('No file corresponds to the session "%s"\
+ raise MQ2NoSuchSessionException(
+ 'No file corresponds to the session "%s"\
' % sessionid)
qtls = []
qtl_matrix = []
|
Raise MQ2NoSuchSessionException when the requested session isn't part of the data provided
|
py
|
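Subclassing the package's base exception lets callers catch the "unknown session" case specifically while generic `MQ2Exception` handlers keep working. A minimal sketch of the hierarchy and its use (the illustrative session id is hypothetical):

```python
class MQ2Exception(Exception):
    """Base class for MQ2 errors."""

class MQ2NoSuchSessionException(MQ2Exception):
    """No input file matches the requested session."""

def get_files_to_read(filelist, sessionid):
    if not filelist:
        raise MQ2NoSuchSessionException(
            'No file corresponds to the session "%s"' % sessionid)
    return filelist

try:
    get_files_to_read([], 'session42')
except MQ2NoSuchSessionException:
    pass  # specific handling here...
except MQ2Exception:
    pass  # ...while broader handlers would still catch it otherwise
```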
diff --git a/src/hypercorn/protocol/h2.py b/src/hypercorn/protocol/h2.py
index <HASH>..<HASH> 100755
--- a/src/hypercorn/protocol/h2.py
+++ b/src/hypercorn/protocol/h2.py
@@ -213,12 +213,13 @@ class H2Protocol:
await self.stream_buffers[event.stream_id].drain()
elif isinstance(event, StreamClosed):
await self._close_stream(event.stream_id)
- await self.send(
- Updated(
- idle=len(self.streams) == 0
- or all(stream.idle for stream in self.streams.values())
- )
+ idle = len(self.streams) == 0 or all(
+ stream.idle for stream in self.streams.values()
)
+ if idle and self.context.terminated:
+ self.connection.close_connection()
+ await self._flush()
+ await self.send(Updated(idle=idle))
elif isinstance(event, Request):
await self._create_server_push(event.stream_id, event.raw_path, event.headers)
except (
|
Emit a GOAWAY for HTTP/2 on graceful shutdown. This ensures that when the server is terminated and idle, the HTTP/2 connection is closed by sending a GOAWAY frame to the client.
|
py
|
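Under the hood, closing an idle HTTP/2 connection means queueing a GOAWAY frame via the h2 state machine and flushing the resulting bytes. A heavily simplified sketch using the h2 library directly (hypothetical wiring; hypercorn's plumbing is asynchronous and more involved):

```python
import h2.config
import h2.connection

config = h2.config.H2Configuration(client_side=False)
conn = h2.connection.H2Connection(config=config)
conn.initiate_connection()

# ... serve streams until the server is terminated and the connection is idle ...

conn.close_connection()        # queues a GOAWAY frame for the client
pending = conn.data_to_send()  # bytes that must still be flushed to the socket
assert pending                 # the preface and GOAWAY are waiting to be sent
```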
diff --git a/dallinger/deployment.py b/dallinger/deployment.py
index <HASH>..<HASH> 100644
--- a/dallinger/deployment.py
+++ b/dallinger/deployment.py
@@ -74,6 +74,7 @@ def new_webbrowser_profile():
def _local_root_files():
"""Return an iterable of filenames which should be copied from the
experiment root directory to the generated temp directory.
+
Assumes the experiment root directory is the current working directory.
"""
good_types = ("*.py", "*.txt")
|
Switch to allow-list paradigm for copying local experiment files
|
py
|
diff --git a/glue/lal.py b/glue/lal.py
index <HASH>..<HASH> 100644
--- a/glue/lal.py
+++ b/glue/lal.py
@@ -387,7 +387,7 @@ class CacheEntry(object):
Returns a string, with the format of a line in a LAL cache,
containing the contents of this cache entry.
"""
- if self.segment != None:
+ if self.segment is not None:
start = self.segment[0]
duration = abs(self.segment)
else:
|
Replace != with "is not" in a comparison to None.
|
py
|
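`!= None` goes through `__eq__`/`__ne__`, which a class is free to override, while `is not None` is an identity check that cannot be fooled. A small demonstration of the difference:

```python
class Chameleon:
    def __eq__(self, other):
        return True  # pathological: claims equality with everything, even None

c = Chameleon()
print(c != None)      # False -- the overridden __eq__ makes c "equal" to None
print(c is not None)  # True  -- identity comparison ignores __eq__ entirely
```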
diff --git a/kana2/store.py b/kana2/store.py
index <HASH>..<HASH> 100644
--- a/kana2/store.py
+++ b/kana2/store.py
@@ -6,8 +6,7 @@ from . import Post, info, utils
POST_USABLE_FUNCTIONS = [
"get_all", "get_extra", "get_media", "get_artcom", "get_notes",
- "set_paths", "write", "load",
- "verify_media", "verify_media_by_md5", "verify_media_by_filesize"
+ "set_paths", "write", "verify_media"
]
@@ -56,7 +55,7 @@ class Store(dict):
for value in to_merge:
store = value if isinstance(value, Store) else Store(value)
self.update(store)
- return self
+ return self
# Store items removals:
|
Fix Store.merge() only merging the first arg
|
py
|
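The second hunk is an indentation bug: `return self` inside the `for` body exits after merging only the first argument. A runnable sketch of the fixed shape:

```python
class Store(dict):
    def merge(self, *to_merge):
        for value in to_merge:
            self.update(value)
        return self  # dedented out of the loop: every argument is merged

merged = Store().merge({'a': 1}, {'b': 2})
assert merged == {'a': 1, 'b': 2}  # with the bug, {'b': 2} was silently dropped
```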
diff --git a/androguard/core/bytecodes/dvm.py b/androguard/core/bytecodes/dvm.py
index <HASH>..<HASH> 100644
--- a/androguard/core/bytecodes/dvm.py
+++ b/androguard/core/bytecodes/dvm.py
@@ -2740,11 +2740,17 @@ class EncodedField:
:rtype: string
"""
if self.access_flags_string is None:
- self.access_flags_string = get_access_flags_string(
- self.get_access_flags())
+ if self.get_access_flags() == 0:
+ # No access flags, i.e. Java defaults apply
+ self.access_flags_string = ""
+ return self.access_flags_string
+ # Try to parse the string
+ self.access_flags_string = get_access_flags_string(self.get_access_flags())
+
+ # Fallback for unknown strings
if self.access_flags_string == "":
- self.access_flags_string = "0x%x" % self.get_access_flags()
+ self.access_flags_string = "0x{:06x}".format(self.get_access_flags())
return self.access_flags_string
def set_name(self, value):
|
Fix bug where a field with no defined access flags would print 0x0
|
py
|
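The fixed logic distinguishes three cases: no flags at all (empty string, Java defaults apply), known flags (human-readable names), and unknown bit patterns (hex fallback). A simplified sketch with an illustrative, deliberately incomplete flag table:

```python
KNOWN_FLAGS = {0x1: 'public', 0x8: 'static', 0x10: 'final'}  # illustrative subset

def access_flags_string(flags):
    if flags == 0:
        return ''                    # no access flags: Java defaults apply
    names = [name for bit, name in KNOWN_FLAGS.items() if flags & bit]
    if names:
        return ' '.join(names)
    return '0x{:06x}'.format(flags)  # fallback for unknown bit patterns

assert access_flags_string(0) == ''
assert access_flags_string(0x9) == 'public static'
assert access_flags_string(0x40000) == '0x040000'
```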
diff --git a/ecell4/deprecated.py b/ecell4/deprecated.py
index <HASH>..<HASH> 100644
--- a/ecell4/deprecated.py
+++ b/ecell4/deprecated.py
@@ -16,7 +16,11 @@ def deprecated(suggest=None):
doc = "[Deprecated]\n"
else:
doc = "[Deprecated] Use '" + suggest + "' instead.\n"
- wrapper.__doc__ = doc + wrapper.__doc__
+
+ if wrapper.__doc__ is None:
+ wrapper.__doc__ = doc
+ else:
+ wrapper.__doc__ = doc + wrapper.__doc__
return wrapper
return decorator
|
fix: Check whether the original docstring is not None when deprecated
|
py
|
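`functools.wraps` copies `__doc__` from the wrapped function, and that attribute is `None` when the function has no docstring, so `doc + wrapper.__doc__` raised `TypeError`. A self-contained sketch of the repaired decorator:

```python
import functools

def deprecated(suggest=None):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        if suggest is None:
            doc = "[Deprecated]\n"
        else:
            doc = "[Deprecated] Use '" + suggest + "' instead.\n"
        # __doc__ is None for functions without a docstring; guard the concatenation
        wrapper.__doc__ = doc if wrapper.__doc__ is None else doc + wrapper.__doc__
        return wrapper
    return decorator

@deprecated(suggest='new_api')
def old_api():  # no docstring: this used to crash the decorator
    pass

assert old_api.__doc__.startswith('[Deprecated]')
```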
diff --git a/tests/test_simple.py b/tests/test_simple.py
index <HASH>..<HASH> 100644
--- a/tests/test_simple.py
+++ b/tests/test_simple.py
@@ -484,4 +484,18 @@ class TestSimple(TestCase):
}
self.assertEqual(result, expected)
+ def test_unicode_strings(self):
+ result = parse("select '0:普通,1:旗舰' from mobile")
+ expected = {
+ 'select': {'value': {"literal": '0:普通,1:旗舰'}},
+ 'from': "mobile"
+ }
+ self.assertEqual(result, expected)
+ def test_issue68(self):
+ result = parse("select deflate(sum(int(mobile_price.price))) from mobile")
+ expected = {
+ 'select': {'value': {"deflate": {"sum": {"int": "mobile_price.price"}}}},
+ 'from': "mobile"
+ }
+ self.assertEqual(result, expected)
|
Enable test_issue<I>; the test already passes
|
py
|
diff --git a/pipeline_live/data/sources/iex.py b/pipeline_live/data/sources/iex.py
index <HASH>..<HASH> 100644
--- a/pipeline_live/data/sources/iex.py
+++ b/pipeline_live/data/sources/iex.py
@@ -9,7 +9,7 @@ from .util import (
def list_symbols():
return [
- symbol['symbol'] for symbol in refdata.get_iex_symbols()
+ symbol['symbol'] for symbol in refdata.get_symbols()
]
|
Fix IEX list_symbols: get_iex_symbols returns symbols that are not eligible for API access, while get_symbols should play nice with all IEX API endpoints.
|
py
|
diff --git a/lib/webinterface_handler.py b/lib/webinterface_handler.py
index <HASH>..<HASH> 100644
--- a/lib/webinterface_handler.py
+++ b/lib/webinterface_handler.py
@@ -30,8 +30,16 @@ import os
import urlparse
import base64
+# The following mod_python imports are done separately in a particular
+# order (util first) because I was getting sometimes publisher import
+# error when testing weird situations, preventing util from being
+# imported and leading to a traceback later. When this happened,
+# importing util was okay, only publisher import caused troubles, so
+# that importing in special order prevents these problems.
try:
- from mod_python import apache, publisher, util
+ from mod_python import util
+ from mod_python import apache
+ from mod_python import publisher
except ImportError:
pass
|
When importing mod_python stuff, import util first (before publisher), fixing some weird publisher importing problems. See the code comment for more.
|
py
|
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -100,5 +100,6 @@ setuptools.setup(
"requests"
],
url="https://github.com/CellProfiler/prokaryote",
- version=version
+ version=version,
+ zip_safe = False
)
|
Flailing - guess what, pip decided to install as a zipped egg, lot of good that does!
|
py
|
diff --git a/test/jupyter/test_jupyter_convert.py b/test/jupyter/test_jupyter_convert.py
index <HASH>..<HASH> 100644
--- a/test/jupyter/test_jupyter_convert.py
+++ b/test/jupyter/test_jupyter_convert.py
@@ -21,6 +21,7 @@
#
import os
+import sys
import unittest
import shutil
import subprocess
@@ -87,6 +88,7 @@ report('this is action report')
self.assertTrue(os.path.isfile('test_wf.html'))
os.chdir(olddir)
+ @unittest.skipIf(sys.platform == 'win32', 'No XeLatex under windows to compile pdf')
def testConvertPDF(self):
olddir = os.getcwd()
os.chdir(file_dir)
|
Disable PDF test under Windows because of lack of XeLaTeX
|
py
|