Columns:
  diff: stringlengths (139 to 3.65k)
  message: stringlengths (8 to 627)
  diff_languages: stringclasses (1 value)
diff --git a/indra/tests/test_bbn.py b/indra/tests/test_bbn.py index <HASH>..<HASH> 100644 --- a/indra/tests/test_bbn.py +++ b/indra/tests/test_bbn.py @@ -9,9 +9,9 @@ from indra.statements import * path_this = os.path.dirname(os.path.abspath(__file__)) test_file_simple = os.path.join(path_this, 'bbn_test_simple.json-ld') test_file_negatedCause = os.path.join(path_this, - 'bbn_test_negatedCause.json-ld') + 'bbn_test_negatedCause.json-ld') test_file_negatedEffect = os.path.join(path_this, - 'bbn_test_negatedEffect.json-ld') + 'bbn_test_negatedEffect.json-ld') def test_simple_extraction(): @@ -47,3 +47,10 @@ def test_negated_effect(): The processor should give no statements for a negated effect.""" bp = process_json_file_old(test_file_negatedEffect) assert(len(bp.statements) == 0) + + +def test_on_ten_doc_file(): + """Test on the extraction from the WM 10 documents.""" + bp = process_jsonld_file(join(path_this, 'wm_10_doc.json-ld')) + assert bp is not None + assert bp.statements
Add a new test for bbn processor.
py
diff --git a/pyocd/gdbserver/gdbserver.py b/pyocd/gdbserver/gdbserver.py index <HASH>..<HASH> 100644 --- a/pyocd/gdbserver/gdbserver.py +++ b/pyocd/gdbserver/gdbserver.py @@ -760,7 +760,7 @@ class GDBServer(threading.Thread): # v_cont capabilities query. elif b'Cont?' == cmd: - return self.create_rsp_packet(b"v_cont;c;C;s;S;t") + return self.create_rsp_packet(b"vCont;c;C;s;S;t") # v_cont, thread action command. elif cmd.startswith(b'Cont'):
Fixed gdbserver response to 'vCont?' command. Search-replace error introduced during PEP8 rename.
py
diff --git a/hydpy/core/autodoctools.py b/hydpy/core/autodoctools.py index <HASH>..<HASH> 100644 --- a/hydpy/core/autodoctools.py +++ b/hydpy/core/autodoctools.py @@ -147,11 +147,10 @@ def autodoc_basemodel(): namespace['__doc__'] = doc -def _number_of_line(member): +def _number_of_line(member_tuple): """Try to return the number of the first line of the definition of a member of a module.""" - if isinstance(member, tuple): - member = member[1] + member = member_tuple[1] try: return member.__code__.co_firstlineno except AttributeError:
Refactor method '_number_of_line' of module `autodoctools`. The `isinstance` check was no longer required.
py
diff --git a/pyvera/__init__.py b/pyvera/__init__.py index <HASH>..<HASH> 100755 --- a/pyvera/__init__.py +++ b/pyvera/__init__.py @@ -119,7 +119,7 @@ class VeraController: """Perform a data_request and return the result.""" request_url = self.base_url + "/data_request" response = requests.get(request_url, timeout=timeout, params=payload) - response.encoding = "utf-8" + response.encoding = response.encoding if response.encoding else "utf-8" return response def get_simple_devices_info(self) -> None:
Setting the encoding only if it wasn't already set.
py
diff --git a/test/legacy_resharding.py b/test/legacy_resharding.py index <HASH>..<HASH> 100755 --- a/test/legacy_resharding.py +++ b/test/legacy_resharding.py @@ -121,11 +121,14 @@ class TestResharding(unittest.TestCase, base_sharding.BaseShardingTest): t = 'varbinary(64)' else: t = 'bigint(20) unsigned' + # Note that the primary key columns are not defined first on purpose to test + # that a reordered column list is correctly used everywhere in vtworker. create_table_template = '''create table %s( -id bigint not null, msg varchar(64), custom_ksid_col ''' + t + ''' not null, -primary key (id), +id bigint not null, +parent_id bigint not null, +primary key (parent_id, id), index by_msg (msg) ) Engine=InnoDB''' create_view_template = (
test: Adapt legacy_resharding.py to new schema (multi-column primary key) as well.
py
diff --git a/cumulus/storage.py b/cumulus/storage.py index <HASH>..<HASH> 100644 --- a/cumulus/storage.py +++ b/cumulus/storage.py @@ -266,15 +266,18 @@ class SwiftclientStorage(CumulusStorage): def __init__(self, *args, **kwargs): warnings.warn("SwiftclientStorage is deprecated and will be removed in django-cumulus==1.3: \ Use CumulusStorage instead.", DeprecationWarning) + super(SwiftclientStorage, self).__init__() class SwiftclientStaticStorage(CumulusStaticStorage): def __init__(self, *args, **kwargs): warnings.warn("SwiftclientStaticStorage is deprecated and will be removed in django-cumulus==1.3: \ Use CumulusStaticStorage instead.", DeprecationWarning) + super(SwiftclientStaticStorage, self).__init__() class ThreadSafeSwiftclientStorage(ThreadSafeCumulusStorage): def __init__(self, *args, **kwargs): warnings.warn("ThreadSafeSwiftclientStorage is deprecated and will be removed in django-cumulus==1.3: \ Use ThreadSafeCumulusStorage instead.", DeprecationWarning) + super(ThreadSafeSwiftclientStorage, self).__init__()
don't forget to call the parent class constructor
py
diff --git a/opentrons/drivers/motor.py b/opentrons/drivers/motor.py index <HASH>..<HASH> 100644 --- a/opentrons/drivers/motor.py +++ b/opentrons/drivers/motor.py @@ -407,13 +407,8 @@ class CNCDriver(object): def wait(self, sec): ms = int((sec % 1.0) * 1000) s = int(sec) - - # splitting delay messages into seconds increments - # so stop signals can interrupt - for i in range(s): - self.check_paused_stopped() - self.send_command(self.DWELL, S=1) - res = self.send_command(self.DWELL, P=ms) + self.check_paused_stopped() + res = self.send_command(self.DWELL, S=s, P=ms) return res == b'ok' def calm_down(self):
removing delay splitting; it needs more thought put into it
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -59,7 +59,7 @@ install_requires = [ 'shapely >=1.3, <1.6', 'docutils >=0.11, <0.14', 'decorator >=3.4, <4.1', - 'django >=1.6, <1.11', + 'django >=1.6, <1.12', 'matplotlib >=1.5, <2.0', 'requests >=2.2, <2.13', # pyshp is fragile, we want only versions we have tested
Allow installation of Django <I> LTS
py
diff --git a/zipline/lines.py b/zipline/lines.py index <HASH>..<HASH> 100644 --- a/zipline/lines.py +++ b/zipline/lines.py @@ -83,11 +83,6 @@ import zipline.protocol as zp log = Logger('Lines') -class CancelSignal(Exception): - def __init__(self): - pass - - class SimulatedTrading(object): def __init__(self, @@ -184,12 +179,7 @@ class SimulatedTrading(object): os.kill(ppid, SIGINT) def handle_exception(self, exc): - if isinstance(exc, CancelSignal): - # signal from monitor of an orderly shutdown, - # do nothing. - pass - else: - self.signal_exception(exc) + self.signal_exception(exc) def signal_exception(self, exc=None): """
Removes unused CancelSignal. This was only triggered by the now removed Monitor.
py
diff --git a/headnode_notifier.py b/headnode_notifier.py index <HASH>..<HASH> 100755 --- a/headnode_notifier.py +++ b/headnode_notifier.py @@ -74,11 +74,12 @@ def send_mail(to_addr, def main(): parser = argparse.ArgumentParser(prog = "headnode notifier", + usage = "headnode_notifier [address] [OPTION]", description = "Simple script for email\ - notifications. Uses gmail.", - version = "testing") - parser.add_argument("--to", - metavar = "", + notifications. Uses gmail\ + by default.", + version = "1.0") + parser.add_argument(metavar = "", action = "store", dest = "to", help = "Recipient address")
UPD: changed --to to a positional argument
py
diff --git a/examples/dep_dsl/repo.py b/examples/dep_dsl/repo.py index <HASH>..<HASH> 100644 --- a/examples/dep_dsl/repo.py +++ b/examples/dep_dsl/repo.py @@ -57,5 +57,5 @@ def define_dep_dsl_pipeline(): @repository -def my_repository(): +def define_repository(): return {'pipelines': {'some_example': define_dep_dsl_pipeline}}
Rename repository definition function (#<I>)
py
diff --git a/vis/optimizer.py b/vis/optimizer.py index <HASH>..<HASH> 100644 --- a/vis/optimizer.py +++ b/vis/optimizer.py @@ -142,7 +142,7 @@ class Optimizer(object): # 0 learning phase for 'test' computed_values = self.compute_fn([seed_input, 0]) losses = computed_values[:len(self.loss_names)] - named_losses = zip(self.loss_names, losses) + named_losses = list(zip(self.loss_names, losses)) overall_loss, grads, wrt_value = computed_values[len(self.loss_names):] # TODO: theano grads shape is inconsistent for some reason. Patch for now and investigate later.
Updating the named_losses assignment so that it will properly display when running the library with Python 3+.
py
diff --git a/src/main/python/succubus/daemonize.py b/src/main/python/succubus/daemonize.py index <HASH>..<HASH> 100644 --- a/src/main/python/succubus/daemonize.py +++ b/src/main/python/succubus/daemonize.py @@ -138,8 +138,9 @@ class Daemon(object): pid = os.getpid() file(self.pid_file, 'w+').write("%s\n" % pid) - # Handle SIGTERM the same as SIGINT: raise a KeyboardInterrupt. - signal.signal(signal.SIGTERM, signal.getsignal(signal.SIGINT)) + def handler(*args): + raise BaseException("SIGTERM was caught") + signal.signal(signal.SIGTERM, handler) # atexit functions are "not called when the program is killed by a # signal not handled by Python". But since SIGTERM is now handled, the # atexit functions do get called.
Do not rely on the SIGINT handler, use our own handler instead. In some scenarios (e.g. our internal build server), the handler for SIGINT is signal.SIG_IGN, i.e. the signal is ignored. In this case, the daemon will not stop and the init script will eventually send SIGKILL, which prevents a clean shutdown.
py
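A minimal, runnable sketch of the failure mode this commit works around: if SIGINT has been set to signal.SIG_IGN (as on the build server described above), copying that handler onto SIGTERM makes SIGTERM a silent no-op, so Python's atexit cleanup never runs. Installing an explicit raising handler avoids inheriting that state. The handler body mirrors the diff; the SIG_IGN setup line is an illustrative stand-in for the environment, not succubus code.

```python
import signal

# Problem: if SIGINT is ignored (e.g. set by a CI supervisor), this
# copies SIG_IGN onto SIGTERM, so a later SIGTERM is silently dropped
# and atexit hooks never run.
signal.signal(signal.SIGINT, signal.SIG_IGN)
signal.signal(signal.SIGTERM, signal.getsignal(signal.SIGINT))

# Fix: install our own handler so SIGTERM always raises, triggering
# Python's normal shutdown path (including atexit functions).
def handler(*args):
    raise BaseException("SIGTERM was caught")

signal.signal(signal.SIGTERM, handler)
```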
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -18,7 +18,6 @@ setup( keywords = "google gmail", url = "https://github.com/charlierguo/gmail", packages=['gmail'], - package_dir={'gmail': ''}, long_description=read('README.md'), classifiers=[ "Development Status :: 3 - Alpha",
No need for package_dir in setup.py
py
diff --git a/pymatgen/entries/compatibility.py b/pymatgen/entries/compatibility.py index <HASH>..<HASH> 100644 --- a/pymatgen/entries/compatibility.py +++ b/pymatgen/entries/compatibility.py @@ -554,11 +554,10 @@ class CorrectionsList(Compatibility): # try: corrections = self.get_corrections_dict(entry) for k, v in corrections.items(): - adjustment_list.append(CompositionEnergyAdjustment(v, - name=k, - cls=self.as_dict(), - description=k - ) + adjustment_list.append(ConstantEnergyAdjustment(v, + name=k, + cls=self.as_dict(), + ) ) return adjustment_list
updated Compat classes with new EnergyAdjustment
py
diff --git a/moto/utilities/utils.py b/moto/utilities/utils.py index <HASH>..<HASH> 100644 --- a/moto/utilities/utils.py +++ b/moto/utilities/utils.py @@ -1,6 +1,10 @@ import json import random import string +import six + +if six.PY2: + from io import open def random_string(length=None): @@ -18,5 +22,5 @@ def load_resource(filename): from pkg_resources import resource_filename load_resource(resource_filename(__name__, "resources/file.json")) """ - with open(filename, "r") as f: - return json.load(f, encoding="utf-8") + with open(filename, "r", encoding="utf-8") as f: + return json.load(f)
Add encoding-param to open(), in case the underlying OS has a different default (#<I>)
py
diff --git a/cherrypy/_cphttptools.py b/cherrypy/_cphttptools.py index <HASH>..<HASH> 100644 --- a/cherrypy/_cphttptools.py +++ b/cherrypy/_cphttptools.py @@ -412,9 +412,12 @@ def getObjFromPath(objPathList, objCache): """ # Let cpg be the first valid object. validObjects = ["cpg"] - + # Scan the objPathList in order from left to right for index, obj in enumerate(objPathList): + # maps virtual filenames to Python identifiers (substitutes '.' for '_') + obj = obj.replace('.', '_') + # currentObjStr holds something like 'cpg.root.something.else' currentObjStr = ".".join(validObjects) @@ -470,9 +473,6 @@ def mapPathToObject(path = None): if path.startswith('/'): path = path[1:] # Remove leading slash if path.endswith('/'): path = path[:-1] # Remove trailing slash - # So that requests like robots.txt can be handled by a method called robots_txt - path = path.replace('.', '_') - if not path: objectPathList = [] else:
Changed the behavior introduced by ticket #<I>. It broke some code that relied on getting the unmangled filename on default(). Now it just searches for the objects with the mangled name, but doesn't keep the URL mangled.
py
diff --git a/benchexec/tools/cbmc.py b/benchexec/tools/cbmc.py index <HASH>..<HASH> 100644 --- a/benchexec/tools/cbmc.py +++ b/benchexec/tools/cbmc.py @@ -34,7 +34,8 @@ class Tool(benchexec.tools.template.BaseTool): REQUIRED_PATHS = [ "cbmc", - "cbmc-binary" + "cbmc-binary", + "goto-cc" ] def executable(self): return util.find_executable('cbmc')
Add goto-cc to required paths for CBMC. goto-cc is now required by the wrapper script, and seemingly isn't actually found at runtime (although I don't understand where "required_files" would even be used in a meaningful way).
py
diff --git a/pyrogram/client/methods/bots/request_callback_answer.py b/pyrogram/client/methods/bots/request_callback_answer.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/methods/bots/request_callback_answer.py +++ b/pyrogram/client/methods/bots/request_callback_answer.py @@ -25,9 +25,8 @@ class RequestCallbackAnswer(BaseClient): chat_id: int or str, message_id: int, callback_data: str): - """Use this method to request a callback answer from bots. This is the equivalent of clicking an inline button - containing callback data. The answer contains info useful for clients to display a notification at the top of - the chat screen or as an alert. + """Use this method to request a callback answer from bots. This is the equivalent of clicking an + inline button containing callback data. Args: chat_id (``int`` | ``str``): @@ -41,6 +40,14 @@ class RequestCallbackAnswer(BaseClient): callback_data (``str``): Callback data associated with the inline button you want to get the answer from. + + Returns: + The answer containing info useful for clients to display a notification at the top of the chat screen + or as an alert. + + Raises: + :class:`Error <pyrogram.Error>` + ``TimeoutError``: If the bot fails to answer within 10 seconds """ return self.send( functions.messages.GetBotCallbackAnswer(
Mention timeout and retries in docs for request_callback_answer
py
diff --git a/jishaku/cog_base.py b/jishaku/cog_base.py index <HASH>..<HASH> 100644 --- a/jishaku/cog_base.py +++ b/jishaku/cog_base.py @@ -21,6 +21,7 @@ import itertools import os import os.path import re +import sys import time import traceback import typing @@ -90,7 +91,13 @@ class JishakuBase(commands.Cog): # pylint: disable=too-many-public-methods """ self.task_count += 1 - cmdtask = CommandTask(self.task_count, ctx, asyncio.Task.current_task()) + + # 3.6 shim + if sys.version_info < (3, 7, 0): + cmdtask = CommandTask(self.task_count, ctx, asyncio.Task.current_task()) + else: + cmdtask = CommandTask(self.task_count, ctx, asyncio.current_task()) + self.tasks.append(cmdtask) try:
Add <I> shim for asyncio.current_task(), since asyncio.Task.current_task() is deprecated in <I>
py
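The version gate above can be packaged as a small helper; a sketch assuming only the standard library (asyncio.Task.current_task() predates Python 3.7, and asyncio.current_task() replaces it from 3.7 on):

```python
import asyncio
import sys

def current_task():
    """Return the currently running task on both Python 3.6 and 3.7+."""
    if sys.version_info < (3, 7, 0):
        # Only option on 3.6; deprecated from 3.7 onwards.
        return asyncio.Task.current_task()
    return asyncio.current_task()
```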
diff --git a/nifgafix.py b/nifgafix.py index <HASH>..<HASH> 100755 --- a/nifgafix.py +++ b/nifgafix.py @@ -7,6 +7,8 @@ PREFIXES = makePrefixes('NIFGA', 'NIFSTD', 'owl') g = rdflib.Graph() g.parse('http://purl.obolibrary.org/obo/uberon/bridge/uberon-bridge-to-nifstd.owl', format='xml') -ng = makeGraph('NIFGA-Equivs', PREFIXES) +name = 'NIFGA-Equivs' +ng = makeGraph(name, PREFIXES) [ng.g.add(t) for t in ((rdflib.URIRef(PREFIXES['NIFGA'] + o.rsplit('/',1)[-1]), p, o) for s, p, o in g.triples((None, rdflib.OWL.equivalentClass, None)))] +ng.add_ont('http://ontology.neuinfo.org/NIF/ttl/generated/' + name + '.ttl', 'NIFGA to NIFSTD mappings') ng.write()
nifgafix: added ontology line
py
diff --git a/pyemma/msm/analysis/dense/pcca.py b/pyemma/msm/analysis/dense/pcca.py index <HASH>..<HASH> 100644 --- a/pyemma/msm/analysis/dense/pcca.py +++ b/pyemma/msm/analysis/dense/pcca.py @@ -566,6 +566,6 @@ class PCCA: """ res = [] assignment = self.metastable_assignment - for i in self.m: + for i in range(self.m): res.append(np.where(assignment == i)[0]) return res \ No newline at end of file
[msm.analysis.dense.pcca]: corrected iteration
py
diff --git a/craftai/__init__.py b/craftai/__init__.py index <HASH>..<HASH> 100644 --- a/craftai/__init__.py +++ b/craftai/__init__.py @@ -1,7 +1,7 @@ """craft ai API python 2/3 client""" __title__ = "craft-ai" -__version__ = "1.4.0" +__version__ = "1.4.1" __author__ = "craft ai" __license__ = "BSD-3-Clause" __copyright__ = "Copyright (c) 2016, craft ai"
Bumping from <I> to <I>
py
diff --git a/lazysignup/views.py b/lazysignup/views.py index <HASH>..<HASH> 100644 --- a/lazysignup/views.py +++ b/lazysignup/views.py @@ -12,7 +12,9 @@ from django.contrib.auth.decorators import login_required from django.contrib.auth.forms import UserCreationForm from django.contrib.auth.models import User -@login_required +from lazysignup.decorators import allow_lazy + +@allow_lazy def convert(request, form_class=UserCreationForm, redirect_field_name='redirect_to'): """ Convert a temporary user to a real one. Reject users who don't appear to be temporary users (ie. they have a usable password)
The convert view itself needs to allow a lazy signup, as it might be the first authenticated view the user hits.
py
diff --git a/discord/gateway.py b/discord/gateway.py index <HASH>..<HASH> 100644 --- a/discord/gateway.py +++ b/discord/gateway.py @@ -63,6 +63,7 @@ class KeepAliveHandler(threading.Thread): self.msg = 'Keeping websocket alive with sequence %s.' self._stop_ev = threading.Event() self._last_ack = time.time() + self._last_send = time.time() def run(self): while not self._stop_ev.wait(self.interval): @@ -88,6 +89,8 @@ class KeepAliveHandler(threading.Thread): f.result() except Exception: self.stop() + else: + self._last_send = time.time() def get_payload(self): return { @@ -408,6 +411,12 @@ class DiscordWebSocket(websockets.client.WebSocketClientProtocol): for index in reversed(removed): del self._dispatch_listeners[index] + @property + def latency(self): + """float: Measures latency between a HEARTBEAT and a HEARTBEAT_ACK in seconds.""" + heartbeat = self._keep_alive + return float('inf') if heartbeat is None else heartbeat._last_ack - heartbeat._last_send + def _can_handle_close(self, code): return code not in (1000, 4004, 4010, 4011)
Add DiscordWebSocket.latency to measure discord heartbeat latency.
py
diff --git a/librosa/tf_agc.py b/librosa/tf_agc.py index <HASH>..<HASH> 100644 --- a/librosa/tf_agc.py +++ b/librosa/tf_agc.py @@ -79,8 +79,7 @@ def tf_agc(frame_iterator, sample_rate=22050, **kwargs): # initialize the mel filter bank after grabbing the first frame f2a = _mfcc.melfb(sample_rate, len(frame), num_frequency_bands, mel_filter_width) -# f2a = f2a[:,:(round(len(frame)/2) + 1)] - + f2a = f2a[:,:(round(len(frame)/2) + 1)] #% map back to FFT grid, flatten bark loop gain #sf2a = sum(f2a); @@ -96,7 +95,7 @@ def tf_agc(frame_iterator, sample_rate=22050, **kwargs): # FFT each frame D = scipy.fft(frame) - + D = D[:(1+int(len(D)/2))] # multiply by f2a audiogram = numpy.dot(f2a, numpy.abs(D))
closer match to dpwe's code
py
diff --git a/multiqc/utils/config.py b/multiqc/utils/config.py index <HASH>..<HASH> 100755 --- a/multiqc/utils/config.py +++ b/multiqc/utils/config.py @@ -133,6 +133,9 @@ def mqc_load_userconfig(): # Load and parse a user config file if we find it mqc_load_config(os.path.expanduser('~/.multiqc_config.yaml')) + + # Load and parse a config file in this working directory if we find it + mqc_load_config('.multiqc_config.yaml') def mqc_load_config(yaml_config): """ Load and parse a config file if we find it """
Also load config file from working directory if we find one.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -28,7 +28,6 @@ setup( url='https://github.com/vmware/column', license='BSD-2', author='VMware', - author_email='[email protected]', description='A thin wrapper on top of ansible with a stable API.', long_description=__doc__, packages=['column'],
Remove unrelated email address. The email address of <EMAIL> is unrelated to the column project.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,8 @@ setup( 'font-v', 'defcon', 'ufolint', - 'ttfautohint-py' + 'ttfautohint-py', + 'opentype-sanitizer', ], entry_points={ 'console_scripts': ['fontbakery=fontbakery.cli:main'],
setup.py: add opentype-sanitizer to install_requires
py
diff --git a/pifpaf/drivers/mysql.py b/pifpaf/drivers/mysql.py index <HASH>..<HASH> 100644 --- a/pifpaf/drivers/mysql.py +++ b/pifpaf/drivers/mysql.py @@ -20,12 +20,18 @@ class MySQLDriver(drivers.Driver): super(MySQLDriver, self)._setUp() self.socket = os.path.join(self.tempdir, "mysql.socket") pidfile = os.path.join(self.tempdir, "mysql.pid") + tempdir = os.path.join(self.tempdir, "tmp") + os.mkdir(tempdir) self._exec(["mysqld", + "--no-defaults", + "--tmpdir=" + tempdir, "--initialize-insecure", "--datadir=" + self.tempdir], ignore_failure=True, path=["/usr/libexec"]) self._exec(["mysqld", + "--no-defaults", + "--tmpdir=" + tempdir, "--datadir=" + self.tempdir, "--pid-file=" + pidfile, "--socket=" + self.socket,
mysql: add --no-defaults and --tmpdir to mysqld. If MySQL is compiled with some default flags, this can interfere with running mysqld locally. We also specify the tmpdir, in case the default is not writable by pifpaf.
py
diff --git a/phoebe/kelly.py b/phoebe/kelly.py index <HASH>..<HASH> 100644 --- a/phoebe/kelly.py +++ b/phoebe/kelly.py @@ -309,13 +309,13 @@ def legacy_to_phoebe(inputfile, create_body=False, mesh='wd'): lcdep1[index]['passband'] = val lcdep2[index]['passband'] = val - if key == 'phoebe_hla.VAL': - lcdep1[index]['pblum'] = val / (4*np.pi) + if key == 'phoebe_hla.VAL': + lcdep1[index]['pblum'] = float(val)/(4*np.pi) if key == 'phoebe_cla.VAL': - lcdep2[index]['pblum'] = val / (4*np.pi) + lcdep2[index]['pblum'] = float(val)/(4*np.pi) if key == 'phoebe_el3.VAL': - lcdep1[index]['l3'] = val - lcdep2[index]['l3'] = val + lcdep1[index]['l3'] = float(val) + lcdep2[index]['l3'] = float(val) if key == 'phoebe_ld_rvy1':
Importing legacy files did not convert passband luminosities and third light contamination from str to float; fixed.
py
diff --git a/travis_docs_builder.py b/travis_docs_builder.py index <HASH>..<HASH> 100755 --- a/travis_docs_builder.py +++ b/travis_docs_builder.py @@ -14,6 +14,7 @@ import subprocess import shutil import sys import uuid +import shlex def encrypt_variable(variable, repo, public_key=None): """ @@ -96,6 +97,8 @@ def generate_GitHub_token(username, password=None, OTP=None, note=None, headers= # XXX: Do this in a way that is streaming def run_command_hiding_token(args, token): + command = ' '.join(map(shlex.quote, args)) + command = command.replace(token.decode('utf-8'), '~'*len(token)) p = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.stdout, p.stderr out = out.replace(token, b"~"*len(token))
Print the command itself, with token removed
py
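A self-contained sketch of the masking idea, assuming a bytes token as in the diff: shlex.quote rebuilds a copy-pasteable command string, and the token is blotted out before anything is shown or returned.

```python
import shlex
import subprocess

def run_command_hiding_token(args, token):
    # Reconstruct a shell-safe command string, then mask the token in it.
    command = ' '.join(map(shlex.quote, args))
    command = command.replace(token.decode('utf-8'), '~' * len(token))
    print(command)  # safe to log: the token has been masked

    p = subprocess.run(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out = p.stdout.replace(token, b'~' * len(token))
    err = p.stderr.replace(token, b'~' * len(token))
    return out, err
```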
diff --git a/niworkflows/reports/core.py b/niworkflows/reports/core.py index <HASH>..<HASH> 100644 --- a/niworkflows/reports/core.py +++ b/niworkflows/reports/core.py @@ -135,8 +135,9 @@ class Reportlet(Element): if not config: raise RuntimeError('Reportlet must have a config object') + # PY35: Sorted config dict for consistent behavior self.name = config.get( - 'name', '_'.join('%s-%s' % i for i in config['bids'].items())) + 'name', '_'.join('%s-%s' % i for i in sorted(config['bids'].items()))) self.title = config.get('title') self.subtitle = config.get('subtitle') self.description = config.get('description')
PY<I>: Sort dictionary for consistent behavior
py
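Why the sort matters: before CPython 3.7, dict iteration order was not a language guarantee, so a name derived from config['bids'].items() could differ between runs. Sorting pins it down. A tiny illustration with a hypothetical bids dict:

```python
# Hypothetical BIDS entities for one reportlet.
bids = {'desc': 'summary', 'datatype': 'figures', 'suffix': 'T1w'}

# Unsorted items() order is an implementation detail before Python 3.7;
# sorted() makes the derived name deterministic everywhere.
name = '_'.join('%s-%s' % item for item in sorted(bids.items()))
print(name)  # datatype-figures_desc-summary_suffix-T1w
```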
diff --git a/hedgehog/client/async_client.py b/hedgehog/client/async_client.py index <HASH>..<HASH> 100644 --- a/hedgehog/client/async_client.py +++ b/hedgehog/client/async_client.py @@ -175,10 +175,14 @@ class AsyncClient: # either, all messages are replies corresponding to the previous requests, # or all messages are asynchronous updates if msgs[0].is_async: + assert all(msg.is_async for msg in msgs) + # handle asynchronous messages logger.debug("Receive updates: %s", msgs) self.registry.handle_async(msgs) else: + assert not any(msg.is_async for msg in msgs) + # handle synchronous messages handlers = self._handlers.popleft() self.registry.register(handlers, msgs)
add assertions about the asynchrony of updates
py
diff --git a/salt/modules/virt.py b/salt/modules/virt.py index <HASH>..<HASH> 100644 --- a/salt/modules/virt.py +++ b/salt/modules/virt.py @@ -438,6 +438,42 @@ def start(vm_): return create(vm_) +def reboot(vm_): + ''' + Reboot a domain via ACPI request + + CLI Example:: + + salt '*' virt.reboot <vm name> + ''' + dom = _get_dom(vm_) + return dom.reboot() == 0 + + +def reset(vm_): + ''' + Reset a VM by emulating the reset button on a physical machine + + CLI Example:: + + salt '*' virt.reset <vm name> + ''' + dom = _get_dom(vm_) + return dom.reset() == 0 + + +def ctrl_alt_del(vm_): + ''' + Sends CTRL+ALT+DEL to a VM + + CLI Example:: + + salt '*' virt.ctrl_alt_del <vm name> + ''' + dom = _get_dom(vm_) + return dom.sendKey(0, 0, [29, 56, 111], 3, 0) + + def create_xml_str(xml): ''' Start a domain based on the xml passed to the function
Add functions for rebooting/resetting VMs
py
diff --git a/setuptools/tests/test_build_meta.py b/setuptools/tests/test_build_meta.py index <HASH>..<HASH> 100644 --- a/setuptools/tests/test_build_meta.py +++ b/setuptools/tests/test_build_meta.py @@ -256,3 +256,16 @@ class TestBuildMetaBackend: build_backend = self.get_build_backend() with pytest.raises(ImportError): build_backend.build_sdist("temp") + + [email protected] +class TestBuildMetaLegacyBackend(TestBuildMetaBackend): + backend_name = 'setuptools.build_meta_legacy' + + # build_meta_legacy-specific tests + def test_build_sdist_relative_path_import(self, tmpdir_cwd): + # This must fail in build_meta, but must pass in build_meta_legacy + build_files(self._relative_path_import_files) + + build_backend = self.get_build_backend() + build_backend.build_sdist("temp")
Add failing test suite for build_meta_legacy. This runs all build_meta tests, plus a test that it is possible to import from the directory containing `setup.py` when using the build_meta_legacy backend.
py
diff --git a/pycomb/test/test_combinators.py b/pycomb/test/test_combinators.py index <HASH>..<HASH> 100644 --- a/pycomb/test/test_combinators.py +++ b/pycomb/test/test_combinators.py @@ -72,6 +72,12 @@ class TestCombinators(TestCase): self.assertEqual(StructType, type(r)) + def test_struct_of_struct(self): + r = c.struct({'name': c.String, 'data': c.struct({'age': c.Int})})({'name': 'Mirko', 'data': {'age': 36}}) + + self.assertEqual('Mirko', r.name) + self.assertEqual(36, r.data.age) + def test_maybe(self): with(self.assertRaises(ValueError)): c.String(None)
Preliminary commits for context support
py
diff --git a/knox/views.py b/knox/views.py index <HASH>..<HASH> 100644 --- a/knox/views.py +++ b/knox/views.py @@ -1,3 +1,4 @@ +from django.contrib.auth.signals import user_logged_in, user_logged_out from rest_framework import status from rest_framework.authentication import BasicAuthentication from rest_framework.permissions import IsAuthenticated, AllowAny @@ -17,6 +18,7 @@ class LoginView(APIView): def post(self, request, format=None): token = AuthToken.objects.create(request.user) + user_logged_in.send(sender=request.user.__class__, request=request, user=request.user) return Response({ "user": UserSerializer(request.user).data, "token": token, @@ -28,6 +30,7 @@ class LogoutView(APIView): def post(self, request, format=None): request._auth.delete() + user_logged_out.send(sender=request.user.__class__, request=request, user=request.user) return Response(None, status=status.HTTP_204_NO_CONTENT) class LogoutAllView(APIView): @@ -40,4 +43,5 @@ class LogoutAllView(APIView): def post(self, request, format=None): request.user.auth_token_set.all().delete() + user_logged_out.send(sender=request.user.__class__, request=request, user=request.user) return Response(None, status=status.HTTP_204_NO_CONTENT)
Send out signals upon login and logout
py
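Why emitting these signals is useful: anything already wired to Django's auth signals now fires for knox token logins too (Django's own update_last_login receiver listens on user_logged_in, for instance). A sketch of a project-level receiver, assuming it lives inside a configured Django project:

```python
from django.contrib.auth.signals import user_logged_in, user_logged_out
from django.dispatch import receiver

@receiver(user_logged_in)
def on_login(sender, request, user, **kwargs):
    # Fires for knox token logins as well as session logins.
    print('%s logged in from %s' % (user, request.META.get('REMOTE_ADDR')))

@receiver(user_logged_out)
def on_logout(sender, request, user, **kwargs):
    print('%s logged out' % user)
```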
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -57,6 +57,18 @@ setup(name='python-bitcoin', 'Topic :: Office/Business :: Financial', 'Topic :: Software Development :: Libraries :: Python Modules', ], + install_requires=[ + 'bigfloat>=0.3.0a2', + 'blist>=1.3.4', + 'cdecimal>=2.3', + 'ecdsa>=0.8', + 'gmpy2>=2.0.0', + 'isodate>=0.4.8', + 'recordtype>=1.1', + 'requests>=0.14.0', + 'simplejson>=2.6.2', + 'six>=1.3.0', + ], ) #
Add requirements to setup.py.
py
diff --git a/pages/admin/__init__.py b/pages/admin/__init__.py index <HASH>..<HASH> 100644 --- a/pages/admin/__init__.py +++ b/pages/admin/__init__.py @@ -21,6 +21,7 @@ from pages.admin.utils import get_connected, make_inline_admin from pages.admin.forms import PageForm from pages.admin.views import traduction, get_content, sub_menu from pages.admin.views import change_status, modify_content, delete_content +import pages.admin.widgets class PageAdmin(admin.ModelAdmin): """Page Admin class."""
Import admin.widgets to make sure they are registered
py
diff --git a/AegeanTools/cluster.py b/AegeanTools/cluster.py index <HASH>..<HASH> 100644 --- a/AegeanTools/cluster.py +++ b/AegeanTools/cluster.py @@ -72,10 +72,10 @@ def pairwise_ellpitical_binary(sources, eps, far=None): if far is None: far = max(a.a/3600 for a in sources) l = len(sources) - distances = np.ones((l, l), dtype=bool) + distances = np.zeros((l, l), dtype=bool) for i in range(l): - for j in range(i,l): - if j<i: + for j in range(i, l): + if j < i: continue if i == j: distances[i, j] = False
fix bug in pairwise_ellpitical_binary that causes distant sources to be associated
py
diff --git a/dfply/summarize.py b/dfply/summarize.py index <HASH>..<HASH> 100644 --- a/dfply/summarize.py +++ b/dfply/summarize.py @@ -8,8 +8,6 @@ def summarize(df, **kwargs): @dfpipe def summarize_each(df, functions, *args): - print functions - #print args columns, values = [], [] for arg in args: if isinstance(arg, pd.Series):
removed print statement causing trouble for py3
py
diff --git a/rest_auth/serializers.py b/rest_auth/serializers.py index <HASH>..<HASH> 100644 --- a/rest_auth/serializers.py +++ b/rest_auth/serializers.py @@ -135,7 +135,16 @@ class JWTSerializer(serializers.Serializer): Serializer for JWT authentication. """ token = serializers.CharField() - user = UserDetailsSerializer() + + def __init__(self, *args, **kwargs): + """ + Need to add `user` field dynamically, to allow using + custom UserDetailsSerializer + """ + from app_settings import UserDetailsSerializer + + super(JWTSerializer, self).__init__(*args, **kwargs) + self.fields['user'] = UserDetailsSerializer() class PasswordResetSerializer(serializers.Serializer):
Allow using custom UserDetailsSerializer with JWTSerializer
py
diff --git a/pyautogui/__init__.py b/pyautogui/__init__.py index <HASH>..<HASH> 100644 --- a/pyautogui/__init__.py +++ b/pyautogui/__init__.py @@ -742,15 +742,15 @@ def _mouseMoveDragTo(moveOrDrag, x, y, duration, tween, button=None): # Non-instant moving/dragging involves tweening: segments = max(width, height) - timeSegment = duration / segments + timeSegment = float(duration) / segments while timeSegment < 0.05: # if timeSegment is too short, let's decrease the amount we divide it by. Otherwise the time.sleep() will be a no-op and the mouse cursor moves there instantly. segments = int(segments * 0.9) # decrease segments by 90%. - timeSegment = duration / segments + timeSegment = float(duration) / segments for n in range(segments): time.sleep(timeSegment) _failSafeCheck() - pointOnLine = tween(n / segments) + pointOnLine = tween(float(n) / segments) tweenX, tweenY = getPointOnLine(startx, starty, x, y, pointOnLine) tweenX, tweenY = int(tweenX), int(tweenY) if moveOrDrag == 'move':
Added float() calls so the code works with Python 2.
py
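The underlying pitfall, in miniature: under Python 2, `/` between two ints truncates, so a short duration divided by a large segment count becomes 0, every sleep is a no-op, and the tween never advances. The numbers below are illustrative:

```python
duration, segments = 1, 400

# Python 2: 1 / 400 == 0 (integer division), so time.sleep(0) does
# nothing and tween(n / segments) is always tween(0).
# Python 3: 1 / 400 == 0.0025 already.
time_segment = float(duration) / segments  # 0.0025 on both versions
point_on_line = float(250) / segments      # 0.625 instead of 0
print(time_segment, point_on_line)
```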
diff --git a/remi/gui.py b/remi/gui.py index <HASH>..<HASH> 100644 --- a/remi/gui.py +++ b/remi/gui.py @@ -1737,15 +1737,15 @@ class CheckBox(Input): self.set_value(checked) def onchange(self, value): - self.set_value(value in ('True', 'true')) + self.set_value(value in ('True', 'true'), 0) return self.eventManager.propagate(self.EVENT_ONCHANGE, [value]) - def set_value(self, checked): + def set_value(self, checked, update_ui = 1): if checked: - self.attributes.__setitem__('checked', 'checked', 0) + self.attributes.__setitem__('checked', 'checked', update_ui) else: if 'checked' in self.attributes: - self.attributes.__delitem__('checked', 0) + self.attributes.__delitem__('checked', update_ui) def get_value(self): """
Checkbox set_value updates the UI by default. Otherwise, setting the value programmatically does not show.
py
diff --git a/openpnm/utils/misc.py b/openpnm/utils/misc.py index <HASH>..<HASH> 100644 --- a/openpnm/utils/misc.py +++ b/openpnm/utils/misc.py @@ -9,6 +9,7 @@ import scipy.sparse import time as _time from collections import OrderedDict from docrep import DocstringProcessor +from IPython.core.magics.execution import _format_time __all__ = [ "Docorator", @@ -301,9 +302,8 @@ def toc(quiet=False): if "_startTime_for_tictoc" in globals(): t = _time.time() - _startTime_for_tictoc if quiet is False: - print(f"Elapsed time in seconds: {t:0.2f}") - else: - return t + print(f"Elapsed time: {_format_time(t)}") + return t else: raise Exception("Start time not set, call tic first")
Use IPython's timeit backend to format the time duration; toc now always returns the time, even when not in quiet mode.
py
diff --git a/facebook.py b/facebook.py index <HASH>..<HASH> 100755 --- a/facebook.py +++ b/facebook.py @@ -381,7 +381,7 @@ def parse_signed_request(signed_request, app_secret): return False # raise ValueError('signed_request had corrupted payload') data = _parse_json(data) - if data.get('algorithm').upper() != 'HMAC-SHA256': + if data.get('algorithm', '').upper() != 'HMAC-SHA256': return False # raise ValueError('signed_request used unknown algorithm') expected_sig = hmac.new(app_secret, msg=payload, digestmod=hashlib.sha256).digest()
It's possible to receive a signed_request which doesn't include an algorithm
py
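The one-character fix above guards a classic None pitfall: dict.get('algorithm') returns None when the key is absent, and None.upper() raises AttributeError, while supplying a default string keeps the comparison safe. In miniature:

```python
data = {}  # a parsed signed_request payload with no 'algorithm' key

# data.get('algorithm').upper()  ->  AttributeError: 'NoneType' object ...
print(data.get('algorithm', '').upper() != 'HMAC-SHA256')  # True: rejected safely
```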
diff --git a/test/acceptance/test_cli.py b/test/acceptance/test_cli.py index <HASH>..<HASH> 100644 --- a/test/acceptance/test_cli.py +++ b/test/acceptance/test_cli.py @@ -118,7 +118,6 @@ class TestCLI(unittest.TestCase): self.assertIsInstance(json.loads(got_output), list) - @unittest.skip('refactoring now') def test_exec_vint_with_verbose_flag(self): valid_file = str(Path('test', 'fixture', 'cli', 'valid1.vim')) cmd = ['vint', '--verbose', valid_file]
Expect to fail until the --verbose option is activated
py
diff --git a/tcex/threat_intelligence/mappings/tag.py b/tcex/threat_intelligence/mappings/tag.py index <HASH>..<HASH> 100644 --- a/tcex/threat_intelligence/mappings/tag.py +++ b/tcex/threat_intelligence/mappings/tag.py @@ -39,7 +39,10 @@ class Tag: params: group_type: """ - group = self._tcex.ti.group(group_type) + if group_type and group_type.lower() == 'task': + group = self._tcex.ti.task() + else: + group = self._tcex.ti.group(group_type) return self.tc_requests.groups_from_tag( group, self.name, filters=filters, owner=owner, params=params )
adding support for getting tasks from tags in the TI module
py
diff --git a/amqpconsumer/events.py b/amqpconsumer/events.py index <HASH>..<HASH> 100644 --- a/amqpconsumer/events.py +++ b/amqpconsumer/events.py @@ -187,7 +187,7 @@ class EventConsumer(object): When completed, the on_queue_declareok method will be invoked by pika. """ logger.debug("Declaring queue %s" % self._queue) - self._channel.queue_declare(self.on_queue_declareok, self._queue, durable=True) + self._channel.queue_declare(queue=self._queue, callback=self.on_queue_declareok, durable=True) def on_queue_declareok(self, _): """Invoked by pika when queue is declared
Set correct parameters for `queue_declare`. Between pika <<I> and pika ><I> there's a signature change for `queue_declare`. To prevent the parameter order from messing with our versions, just declare everything as a kwarg, since the names remain the same between versions. See current: <URL>
py
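The defensive pattern the message describes, sketched with hypothetical stand-ins (these are not pika's exact signatures): when a library reorders positional parameters between major versions but keeps the parameter names, a keyword-only call works against both.

```python
def queue_declare_old(callback, queue='', durable=False):
    """Stand-in for the older signature (callback comes first)."""
    return ('old-style', queue, durable)

def queue_declare_new(queue='', durable=False, callback=None):
    """Stand-in for the newer signature (queue first, callback a kwarg)."""
    return ('new-style', queue, durable)

def declare(queue_declare):
    # Keyword-only call: immune to the reordering, because the parameter
    # *names* stayed the same even though their positions did not.
    return queue_declare(queue='events', durable=True, callback=print)

print(declare(queue_declare_old))  # ('old-style', 'events', True)
print(declare(queue_declare_new))  # ('new-style', 'events', True)
```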
diff --git a/imagemounter/mount_images.py b/imagemounter/mount_images.py index <HASH>..<HASH> 100644 --- a/imagemounter/mount_images.py +++ b/imagemounter/mount_images.py @@ -113,12 +113,6 @@ def main(): help="prevent trying to mount the image as a single volume if no volume system was found") args = parser.parse_args() - # Check some prerequisites - if os.geteuid(): # Not run as root - print('[-] This program needs to be run as root!') - #os.execvp('sudo', ['sudo'] + sys.argv) - sys.exit(1) - if not args.color: #noinspection PyUnusedLocal,PyShadowingNames def col(s, *args, **kwargs): @@ -127,6 +121,10 @@ def main(): from termcolor import colored col = colored + # Check some prerequisites + if os.geteuid(): # Not run as root + print(col('[!] Not running as root!', 'yellow')) + if __version__.endswith('a') or __version__.endswith('b'): print(col("Development release v{0}. Please report any bugs you encounter.".format(__version__), attrs=['dark']))
Warn on not running as root. Will probably crash later in execution.
py
diff --git a/ratcave/shader.py b/ratcave/shader.py index <HASH>..<HASH> 100644 --- a/ratcave/shader.py +++ b/ratcave/shader.py @@ -139,6 +139,4 @@ class Shader(ugl.BindingContextMixin, ugl.BindNoTargetMixin): # obtain the uniform location if not loc: loc = self.get_uniform_location(name) - gl.glUniformMatrix4fv(loc, 1, False, (c_float * 16)(*mat.ravel('F'))) # uplaod the 4x4 floating point matrix - # cmat = mat.T.ctypes.data_as(POINTER(c_float * 16)).contents - # gl.glUniformMatrix4fv(loc, 1, False, cmat) # uplaod the 4x4 floating point matrix + gl.glUniformMatrix4fv(loc, 1, True, (c_float * 16)(*mat.ravel())) # uplaod the 4x4 floating point matrix
Performed transpose directly in OpenGL function.
py
diff --git a/wandb/apis/internal.py b/wandb/apis/internal.py index <HASH>..<HASH> 100644 --- a/wandb/apis/internal.py +++ b/wandb/apis/internal.py @@ -783,7 +783,7 @@ class Api(object): ), 'Content-Length': str(total - completed) } - elif status.status_code in (500, 502, 503, 504): + elif status.status_code in (408, 500, 502, 503, 504): time.sleep(random.randint(1, 10)) else: raise e
File syncing: retry on client timeouts
py
diff --git a/tensorflow_datasets/core/features/features_dict.py b/tensorflow_datasets/core/features/features_dict.py index <HASH>..<HASH> 100644 --- a/tensorflow_datasets/core/features/features_dict.py +++ b/tensorflow_datasets/core/features/features_dict.py @@ -189,11 +189,15 @@ class FeaturesDict(top_level_feature.TopLevelFeature): def encode_example(self, example_dict): """See base class for details.""" - return { - k: feature.encode_example(example_value) - for k, (feature, example_value) - in utils.zip_dict(self._feature_dict, example_dict) - } + example = {} + for k, (feature, example_value) in utils.zip_dict(self._feature_dict, + example_dict): + try: + example[k] = feature.encode_example(example_value) + except Exception as e: # pylint: disable=broad-except + utils.reraise(e, prefix=f'In <{feature.__class__.__name__}>' + + f' with name "{k}":\n') + return example def _flatten(self, x): """See base class for details."""
adds some helpful information to errors raised during encode_example. PiperOrigin-RevId: <I>
py
diff --git a/outbox.py b/outbox.py index <HASH>..<HASH> 100644 --- a/outbox.py +++ b/outbox.py @@ -139,12 +139,17 @@ class Outbox(object): if mode == 'TLS': smtp.starttls() - smtp.login(self.username, self.password) + self.authenticate(smtp) + return smtp def connect(self): self._conn = self._login() + def authenticate(self, smtp): + """Perform login with the given smtplib.SMTP instance.""" + smtp.login(self.username, self.password) + def disconnect(self): self._conn.quit() @@ -181,6 +186,18 @@ class Outbox(object): return self.username +class AnonymousOutbox(Outbox): + """Outbox subclass suitable for SMTP servers that do not (or will not) + perform authentication. + """ + def __init__(self, *args, **kwargs): + super(AnonymousOutbox, self).__init__('', '', *args, **kwargs) + + def authenticate(self, smtp): + """Perform no authentication as the server does not require it.""" + pass + + def add_attachment(message, attachment): '''Attach an attachment to a message as a side effect.
Add an Outbox subclass for making authentication conditional.
py
diff --git a/Malmo/samples/Python_examples/tabular_q_learning.py b/Malmo/samples/Python_examples/tabular_q_learning.py index <HASH>..<HASH> 100755 --- a/Malmo/samples/Python_examples/tabular_q_learning.py +++ b/Malmo/samples/Python_examples/tabular_q_learning.py @@ -283,7 +283,7 @@ class TabQAgent(object): if not s in self.q_table: continue value = self.q_table[s][action] - color = 255 * ( value - min_value ) / ( max_value - min_value ) # map value to 0-255 + color = int( 255 * ( value - min_value ) / ( max_value - min_value )) # map value to 0-255 color = max( min( color, 255 ), 0 ) # ensure within [0,255] color_string = '#%02x%02x%02x' % (255-color, color, 0) self.canvas.create_oval( (world_x - 1 - x + action_positions[action][0] - action_radius ) *scale,
Fixed float division error for color in tabular_q_learning.py caused by Python 3 division
py
diff --git a/dvc/output/base.py b/dvc/output/base.py index <HASH>..<HASH> 100644 --- a/dvc/output/base.py +++ b/dvc/output/base.py @@ -104,8 +104,7 @@ class OutputBase(object): if remote: parsed = urlparse(path) return remote.path_info / parsed.path.lstrip("/") - else: - return self.REMOTE.path_cls(path) + return self.REMOTE.path_cls(path) def __repr__(self): return "{class_name}: '{def_path}'".format(
output: base: remove unnecessary 'else'. Reported by DeepSource.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -56,5 +56,6 @@ setup( test_suite='tests', classifiers=CLASSIFIERS, scripts=['bin/num2words'], - install_requires=["docopt>=0.6.2"] + install_requires=["docopt>=0.6.2"], + tests_require=['delegator.py'], )
Add Delegator.py to test dependencies (#<I>). Delegator.py is used for cli tests but isn't included in dependencies.
py
diff --git a/tests/test_deserialization.py b/tests/test_deserialization.py index <HASH>..<HASH> 100644 --- a/tests/test_deserialization.py +++ b/tests/test_deserialization.py @@ -18,7 +18,6 @@ from tests.base import ( central, ALL_FIELDS, User, - DummyModel, ) class TestDeserializingNone: @@ -514,6 +513,11 @@ class TestFieldDeserialization: assert isinstance(result, uuid.UUID) assert str(result) == uuid_str + uuid4 = uuid.uuid4() + result = field.deserialize(uuid4) + assert isinstance(result, uuid.UUID) + assert result == uuid4 + @pytest.mark.parametrize('in_value', [ 'malformed',
Add test for deserializing UUID objects
py
diff --git a/datahandling/datahandling/datahandling.py b/datahandling/datahandling/datahandling.py index <HASH>..<HASH> 100644 --- a/datahandling/datahandling/datahandling.py +++ b/datahandling/datahandling/datahandling.py @@ -1552,6 +1552,31 @@ def IFFT_filter(Signal, SampleFreq, lowerFreq, upperFreq): print("done") return _np.real(FilteredSignal) +def butterworth_filter(Signal, SampleFreq, lowerFreq, upperFreq): + """ + Filters data using fft -> zeroing out fft bins -> ifft + + Parameters + ---------- + Signal : ndarray + Signal to be filtered + SampleFreq : float + Sample frequency of signal + lowerFreq : float + Lower frequency of bandpass to allow through filter + upperFreq : float + Upper frequency of bandpass to allow through filter + + Returns + ------- + FilteredData : ndarray + Array containing the filtered data + """ + b, a = make_butterworth_b_a(lowerFreq, upperFreq, SampleFreq) + FilteredSignal = scipy.signal.filtfilt(b, a, Signal) + return _np.real(FilteredSignal) + + def make_butterworth_b_a(lowcut, highcut, SampleFreq, order=5, btype='band'): """ Generates the b and a coefficients for a butterworth IIR filter.
added a Butterworth filter function
py
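A usage sketch for the building blocks the new function relies on, assuming make_butterworth_b_a wraps scipy.signal.butter as its docstring suggests; frequencies and variable names here are illustrative, not from the repository.

```python
import numpy as np
import scipy.signal

sample_freq = 10000.0                      # Hz
t = np.arange(0, 1.0, 1.0 / sample_freq)
sig = np.sin(2 * np.pi * 50 * t) + np.sin(2 * np.pi * 500 * t)

# 5th-order Butterworth bandpass, 100-1000 Hz, in normalized frequency
# (1.0 corresponds to the Nyquist frequency, sample_freq / 2).
nyq = sample_freq / 2.0
b, a = scipy.signal.butter(5, [100.0 / nyq, 1000.0 / nyq], btype='band')

# filtfilt runs the filter forward and backward for zero phase shift:
# the 50 Hz component is attenuated, the 500 Hz component passes.
filtered = scipy.signal.filtfilt(b, a, sig)
print(filtered.shape)  # (10000,)
```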
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ from distutils.core import setup setup( name='py-august', - version='0.10.1', + version='0.11.0', packages=['august'], url='https://github.com/snjoetw/py-august', license='MIT',
- bumped version to <I>
py
diff --git a/jacquard/jacquard.py b/jacquard/jacquard.py index <HASH>..<HASH> 100644 --- a/jacquard/jacquard.py +++ b/jacquard/jacquard.py @@ -169,7 +169,8 @@ def _dispatch(modules, arguments): command_validator.preflight(command, args) logger.info("Jacquard begins (v{})", __version__) - logger.info("Saving log to [{}]", logger.log_filename) + logger.info("Saving log to [{}]", + os.path.basename(logger.log_filename)) logger.debug("Writing output to tmp directory [{}]", args.temp_working_dir)
ex-<I> (cgates): Adjusted to show log basename instead of full path
py
diff --git a/salt/utils/event.py b/salt/utils/event.py index <HASH>..<HASH> 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -160,10 +160,13 @@ class EventPublisher(multiprocessing.Process): epub_sock.bind(epub_uri) epull_sock.bind(epull_uri) # Restrict access to the sockets + pub_mode = 448 + if self.opts.get('client_acl'): + pub_mode = 511 os.chmod( os.path.join(self.opts['sock_dir'], 'master_event_pub.ipc'), - 448 + pub_mode ) os.chmod( os.path.join(self.opts['sock_dir'],
Enable global event communication when the client_acl is turned on
py
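The two magic numbers are just octal permissions written in decimal; spelled out, the change widens the event publisher socket from owner-only to world-accessible when client_acl users need to reach it:

```python
print(oct(448))  # 0o700: rwx for the owner only (the default)
print(oct(511))  # 0o777: rwx for everyone, needed when client_acl is set
```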
diff --git a/djstripe/settings.py b/djstripe/settings.py index <HASH>..<HASH> 100644 --- a/djstripe/settings.py +++ b/djstripe/settings.py @@ -54,9 +54,6 @@ INVOICE_FROM_EMAIL = getattr(settings, "DJSTRIPE_INVOICE_FROM_EMAIL", "billing@e PAYMENTS_PLANS = getattr(settings, "DJSTRIPE_PLANS", {}) PLAN_HIERARCHY = getattr(settings, "DJSTRIPE_PLAN_HIERARCHY", {}) -PASSWORD_INPUT_RENDER_VALUE = getattr(settings, 'DJSTRIPE_PASSWORD_INPUT_RENDER_VALUE', False) -PASSWORD_MIN_LENGTH = getattr(settings, 'DJSTRIPE_PASSWORD_MIN_LENGTH', 6) - PRORATION_POLICY = getattr(settings, 'DJSTRIPE_PRORATION_POLICY', False) PRORATION_POLICY_FOR_UPGRADES = getattr(settings, 'DJSTRIPE_PRORATION_POLICY_FOR_UPGRADES', False) CANCELLATION_AT_PERIOD_END = not getattr(settings, 'DJSTRIPE_PRORATION_POLICY', False)
Remove completely unused PASSWORD settings
py
diff --git a/python/proton/_handlers.py b/python/proton/_handlers.py index <HASH>..<HASH> 100644 --- a/python/proton/_handlers.py +++ b/python/proton/_handlers.py @@ -1082,7 +1082,7 @@ class Handshaker(Handler): @staticmethod def on_session_remote_open(event): ssn = event.session - if ssn.state() & Endpoint.LOCAL_UNINIT: + if ssn.state & Endpoint.LOCAL_UNINIT: ssn.open() @staticmethod
PROTON-<I>: session state is a property, not a method. This closes #<I>.
py
diff --git a/simplesteem/simplesteem.py b/simplesteem/simplesteem.py index <HASH>..<HASH> 100644 --- a/simplesteem/simplesteem.py +++ b/simplesteem/simplesteem.py @@ -478,4 +478,24 @@ class SimpleSteem: return True + def dex_ticker(self): + d = Dex(self.steem_instance()) + self.ticker = d.get_ticker(); + return self.ticker + + + def steem_to_sbd(self, steem, account=None): + if not account: + account = self.mainaccount + best_price = self.dex_ticker()['highest_bid'] + try: + d.sell(steem, "STEEM", best_price, account=account) + except Exception as e: + self.msg.error_message("COULD NOT SELL STEEM FOR SBD") + return False + else: + return True + + + # EOF
Added new methods for checking the Dex ticker and selling steem for sbd
py
diff --git a/consoleprinter/__init__.py b/consoleprinter/__init__.py index <HASH>..<HASH> 100644 --- a/consoleprinter/__init__.py +++ b/consoleprinter/__init__.py @@ -2487,9 +2487,10 @@ def remove_extra_indentation(doc, stop_looking_when_encountered=None, padding=0, return doc newdoc = "" - whitespacecount = 0 + whitespacecount = -1 keeplookingforindention = True + for line in doc.split("\n"): line = line.rstrip() @@ -2497,11 +2498,17 @@ def remove_extra_indentation(doc, stop_looking_when_encountered=None, padding=0, if line.lower().startswith(stop_looking_when_encountered): keeplookingforindention = False - if keeplookingforindention is True: - if whitespacecount == 0: - whitespacecount = len(line) - len(line.lstrip()) + if keeplookingforindention is True and '"""' not in line and len(line.strip())>0: + whitespacecount2 = len(line) - len(line.lstrip()) + + if (whitespacecount2 < whitespacecount) or (whitespacecount<0): + whitespacecount = whitespacecount2 + + + for line in doc.split("\n"): line = str(" " * padding) + line[whitespacecount:] + newdoc += line + "\n" newdoc = newdoc.strip()
Emily Dickinson: My friends are my estate. Tuesday <I> June <I> (week:<I> day:<I>), <I>:<I>:<I>
py
diff --git a/msvccompiler.py b/msvccompiler.py index <HASH>..<HASH> 100644 --- a/msvccompiler.py +++ b/msvccompiler.py @@ -187,6 +187,19 @@ def get_build_architecture(): j = string.find(sys.version, ")", i) return sys.version[i+len(prefix):j] +def normalize_and_reduce_paths(paths): + """Return a list of normalized paths with duplicates removed. + + The current order of paths is maintained. + """ + # Paths are normalized so things like: /a and /a/ aren't both preserved. + reduced_paths = [] + for p in paths: + np = os.path.normpath(p) + # XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set. + if np not in reduced_paths: + reduced_paths.append(np) + return reduced_paths class MSVCCompiler (CCompiler) : @@ -270,6 +283,7 @@ class MSVCCompiler (CCompiler) : self.__paths.append(p) except KeyError: pass + self.__paths = normalize_and_reduce_paths(self.__paths) os.environ['path'] = string.join(self.__paths, ';') self.preprocess_options = None
Backport <I>: SF #<I>, MSVCCompiler creates redundant and long PATH strings. If MSVCCompiler.initialize() was called multiple times, the path would get duplicated. On Windows, this is a problem because the path is limited to 4k. There's no benefit in adding a path multiple times, so prevent that from occurring. We also normalize the path before checking for duplicates, so things like /a and /a/ won't both be stored.
py
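What the normalization step buys, shown on POSIX paths for brevity (the original targets Windows PATH entries): os.path.normpath collapses trailing slashes and '.' segments, so different spellings of the same directory reduce to one entry while the original order is preserved.

```python
import os

paths = ['/opt/msvc/bin', '/opt/msvc/bin/', '/usr/bin', '/opt/msvc/./bin']

reduced = []
for p in paths:
    np_ = os.path.normpath(p)
    # All three spellings of the msvc path normalize to the same string,
    # so only the first occurrence is kept.
    if np_ not in reduced:
        reduced.append(np_)
print(reduced)  # ['/opt/msvc/bin', '/usr/bin']
```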
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ with open('LICENSE') as fl: setup( name='CurrencyConverter', - version='0.14.2', + version='0.14.3', author='Alex Prengère', author_email='[email protected]', url='https://github.com/alexprengere/currencyconverter',
Bump to version <I>
py
diff --git a/polyaxon/api/utils/views.py b/polyaxon/api/utils/views.py index <HASH>..<HASH> 100644 --- a/polyaxon/api/utils/views.py +++ b/polyaxon/api/utils/views.py @@ -125,7 +125,7 @@ class AuditorMixinView(object): def perform_update(self, serializer): instance = serializer.save() - auditor.record(event_type=self.delete_event, + auditor.record(event_type=self.update_event, instance=instance, actor_id=self.request.user.id, actor_name=self.request.user.username)
Fix auditor mixin view; it was using the wrong event for update
py
diff --git a/dashboard/modules/job/sdk.py b/dashboard/modules/job/sdk.py index <HASH>..<HASH> 100644 --- a/dashboard/modules/job/sdk.py +++ b/dashboard/modules/job/sdk.py @@ -252,7 +252,7 @@ class JobSubmissionClient(SubmissionClient): >>> from ray.job_submission import JobSubmissionClient >>> client = JobSubmissionClient("http://127.0.0.1:8265") # doctest: +SKIP >>> client.submit_job(entrypoint="echo hello") # doctest: +SKIP - >>> client.get_job_info("raysubmit_4LamXRuQpYdSMg7J") # doctest: +SKIP + >>> client.get_job_status("raysubmit_4LamXRuQpYdSMg7J") # doctest: +SKIP 'SUCCEEDED' Args:
[Job] Fix typo in job sdk docstring (#<I>)
py
diff --git a/src/pytrackr/device.py b/src/pytrackr/device.py index <HASH>..<HASH> 100644 --- a/src/pytrackr/device.py +++ b/src/pytrackr/device.py @@ -1,3 +1,4 @@ +import datetime class trackrDevice(object): @@ -12,13 +13,21 @@ class trackrDevice(object): return self.json_state.get('trackerId', None) def last_time_seen(self): + "ex. Mon Dec 19 17:57:06 UTC 2016" return self.json_state.get('lastTimeSeen', None) - def type(self): - return self.json_state.get('type', None) - def last_updated(self): - return self.json_state.get('lastUpdated', None) + # This is in Epoch time * 1000. (milliseconds) + # ex. 1482007969200 + # Converting to match format of last_time_seen. + last_update = int(self.json_state.get('lastUpdated', None)) + dt = datetime.datetime.utcfromtimestamp(last_update/1000.) + return dt.strftime('%a %b %d %H:%M:%S UTC %Y') + + def trackr_type(self): + # Not sure what this is? TrackR Bravo responses with + # a value of bluetooth. + return self.json_state.get('type', None) def battery_level(self): return self.json_state.get('batteryLevel', None)
Convert last updated timestamp to the same format as last seen.
py
diff --git a/src/google_music_proto/mobileclient/schemas.py b/src/google_music_proto/mobileclient/schemas.py index <HASH>..<HASH> 100644 --- a/src/google_music_proto/mobileclient/schemas.py +++ b/src/google_music_proto/mobileclient/schemas.py @@ -897,7 +897,7 @@ class RadioStationSchema(MobileClientSchema): recentTimestamp = fields.Str() seed = fields.Nested(RadioSeedSchema) sessionToken = fields.Str() - skipEventHistory = fields.List(fields.Field) # TODO What's in this array? + skipEventHistory = fields.List(fields.Dict) # TODO What's in this array? stationSeeds = fields.Nested(RadioSeedSchema, many=True) tracks = fields.Nested(TrackSchema, many=True)
skipEventHistory is probably a list of dicts
py
diff --git a/marionette_tg/plugins/_tg.py b/marionette_tg/plugins/_tg.py index <HASH>..<HASH> 100644 --- a/marionette_tg/plugins/_tg.py +++ b/marionette_tg/plugins/_tg.py @@ -408,8 +408,7 @@ class AmazonMsgLensHandler(FteHandler): lens += [key] * amazon_msg_lens[key] target_len_in_bytes = random.choice(lens) - target_len_in_bytes -= - fte.encoder.DfaEncoderObject._COVERTEXT_HEADER_LEN_CIPHERTTEXT + target_len_in_bytes -= fte.encoder.DfaEncoderObject._COVERTEXT_HEADER_LEN_CIPHERTTEXT target_len_in_bytes -= fte.encrypter.Encrypter._CTXT_EXPANSION target_len_in_bits = target_len_in_bytes * 8.0
Update _tg.py. Fixing an unfortunate line break from Sublime Text.
py
diff --git a/amino/logging.py b/amino/logging.py index <HASH>..<HASH> 100644 --- a/amino/logging.py +++ b/amino/logging.py @@ -115,17 +115,18 @@ _file_fmt = ('{asctime} [{levelname} @ {name}:{funcName}:{lineno}] {message}') def amino_file_logging(logger: logging.Logger, level: int=logging.DEBUG, logfile: Path=default_logfile, - fmt: str=None) -> None: + fmt: str=None) -> logging.Handler: logfile.parent.mkdir(exist_ok=True) formatter = logging.Formatter(fmt or _file_fmt, style='{') handler = logging.FileHandler(str(logfile)) handler.setFormatter(formatter) logger.addHandler(handler) init_loglevel(handler, level) + return handler -def amino_root_file_logging(level: int=logging.DEBUG, **kw: Any) -> None: - amino_file_logging(amino_root_logger, level, **kw) +def amino_root_file_logging(level: int=logging.DEBUG, **kw: Any) -> logging.Handler: + return amino_file_logging(amino_root_logger, level, **kw) class Logging:
return file handler from `amino_file_logging`
py
diff --git a/salt/states/mssql_user.py b/salt/states/mssql_user.py index <HASH>..<HASH> 100644 --- a/salt/states/mssql_user.py +++ b/salt/states/mssql_user.py @@ -74,7 +74,7 @@ def present(name, login=None, domain=None, database=None, roles=None, options=No domain=domain, database=database, roles=roles, - options=_normalize_options(user_options), + options=_normalize_options(options), **kwargs) if user_created is not True: # Non-empty strings are also evaluated to True, so we cannot use if not user_created: ret['result'] = False
typo in mssql_user state
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -47,7 +47,6 @@ setup( 'matplotlib'], dependency_links=[ 'git+https://github.com/NeuralEnsemble/libNeuroML.git@development#egg=libNeuroML', - 'git+https://github.com/rgerkin/pylems.git@master#egg=pylems-0.4.9.1' ], classifiers = [ 'Intended Audience :: Science/Research',
Removed dependency link to GitHub now that pylems-<I> is on PyPI
py
diff --git a/facepy/graph_api.py b/facepy/graph_api.py index <HASH>..<HASH> 100755 --- a/facepy/graph_api.py +++ b/facepy/graph_api.py @@ -134,7 +134,7 @@ class GraphAPI(object): yield data - def _query(self, method, path, data={}, page=False): + def _query(self, method, path, data=None, page=False): """ Fetch an object from the Graph API and parse the output, returning a tuple where the first item is the object yielded by the Graph API and the second is the URL for the next page of results, or @@ -145,6 +145,7 @@ class GraphAPI(object): :param data: A dictionary of HTTP GET parameters (for GET requests) or POST data (for POST requests). :param page: A boolean describing whether to return an iterator that iterates over each page of results. """ + data = data or {} def load(method, url, data): if method in ['GET', 'DELETE']:
Avoid using mutables as default arguments. See 'Principle of Least Astonishment in Python: The Mutable Default Argument' for really good references on why you should avoid assigning a mutable as a default argument unless you really know what you are doing. * <URL>
py
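The pitfall is easy to demonstrate in isolation: a default argument is evaluated once, at function definition, so a mutable default is shared by every call.

```python
def record_broken(item, seen=[]):   # one list, created at def time, shared by all calls
    seen.append(item)
    return seen

def record_fixed(item, seen=None):
    seen = seen or []               # the same idiom the commit uses in _query
    seen.append(item)
    return seen

print(record_broken('a'))  # ['a']
print(record_broken('b'))  # ['a', 'b']  <- state leaked between calls
print(record_fixed('a'))   # ['a']
print(record_fixed('b'))   # ['b']
```

One subtlety of the `x = x or {}` idiom: a caller-supplied empty container is also replaced with a fresh one; `if x is None: x = {}` is the stricter spelling when that distinction matters.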
diff --git a/stdeb/util.py b/stdeb/util.py index <HASH>..<HASH> 100644 --- a/stdeb/util.py +++ b/stdeb/util.py @@ -843,9 +843,6 @@ class DebianInfo: mime_desktop_files = parse_vals(cfg,module_name,'MIME-Desktop-Files') if len(mime_desktop_files): need_custom_binary_target = True - self.dh_desktop_indep_line = '\tdh_desktop' - else: - self.dh_desktop_indep_line = '' # E. any mime .desktop files self.install_file_lines = [] @@ -1548,5 +1545,4 @@ RULES_BINARY_INDEP_TARGET = """ binary-indep: build %(dh_binary_indep_lines)s %(dh_installmime_indep_line)s -%(dh_desktop_indep_line)s """
remove deprecated dh_desktop call (#<I>)
py
diff --git a/raiden/tests/integration/test_matrix_transport.py b/raiden/tests/integration/test_matrix_transport.py index <HASH>..<HASH> 100644 --- a/raiden/tests/integration/test_matrix_transport.py +++ b/raiden/tests/integration/test_matrix_transport.py @@ -403,8 +403,7 @@ def test_matrix_tx_error_handling( ) # FIXME: TransactionExecutionStatus is not yet set to FAILURE, so asserting no SUCCESS assert channelstate.close_transaction.result != TransactionExecutionStatus.SUCCESS - # assert transport is still healthy - assert not app0.raiden.transport._stop_event.ready() + assert app0.raiden.transport._stop_event.ready() def test_matrix_message_retry(
The node is now killed when an error occurs; update the transport test accordingly
py
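For readers unfamiliar with the assertion: the transport exposes a gevent `Event` that is set on shutdown, so `_stop_event.ready()` answers "has this component stopped?". A minimal sketch of the pattern (a toy stand-in, not Raiden's actual transport):

```python
import gevent
from gevent.event import Event

class Transport:
    def __init__(self):
        self._stop_event = Event()

    def run(self):
        try:
            raise RuntimeError('transaction error')
        except RuntimeError:
            self._stop_event.set()   # kill the node rather than limp on

transport = Transport()
gevent.spawn(transport.run).join()
assert transport._stop_event.ready()   # what the updated test now checks
```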
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ setup( description='User interface widgets for use with Pygame', py_modules=['pygwidgets'], package_dir = {'': 'src'}, - url='http://www.furrypants.com/', + url='https://github.com/IrvKalb/pygwidgets', classifiers=[ "Programming Language :: Python :: 3", "License :: BSD2-Clause License",
Update setup.py: change the project URL to the GitHub repository
py
diff --git a/test/mri/test_diffusion.py b/test/mri/test_diffusion.py index <HASH>..<HASH> 100755 --- a/test/mri/test_diffusion.py +++ b/test/mri/test_diffusion.py @@ -27,7 +27,7 @@ BASE_WORK_PATH = os.path.abspath(os.path.join( class TestDiffusion(TestCase): - ARCHIVE_PATH = '/Users/tclose/Data/MBI/noddi' + ARCHIVE_PATH = os.path.join(os.environ['HOME'], 'Data', 'MBI', 'noddi') PROJECT = 'pilot' DATASET_NAME = 'diffusion' SUBJECT = 'SUBJECT1' @@ -71,7 +71,7 @@ class TestDiffusion(TestCase): class TestNODDI(TestCase): - ARCHIVE_PATH = '/Users/tclose/Data/MBI/noddi' + ARCHIVE_PATH = os.path.join(os.environ['HOME'], 'Data', 'MBI', 'noddi') WORK_PATH = os.path.abspath(os.path.join(BASE_WORK_PATH, 'noddi')) DATASET_NAME = 'noddi' EXAMPLE_INPUT_PROJECT = 'example_input'
slight modifications to get tests to run on nitrous
py
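`os.environ['HOME']` removes the hard-coded user but still raises KeyError where HOME is unset (some Windows and CI environments). If portability mattered, `os.path.expanduser` would be the safer spelling:

```python
import os

# As in the diff (assumes HOME is set, which POSIX guarantees):
archive_path = os.path.join(os.environ['HOME'], 'Data', 'MBI', 'noddi')

# Equivalent, but also works where HOME is absent:
archive_path = os.path.join(os.path.expanduser('~'), 'Data', 'MBI', 'noddi')
print(archive_path)
```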
diff --git a/nba_py/team.py b/nba_py/team.py index <HASH>..<HASH> 100644 --- a/nba_py/team.py +++ b/nba_py/team.py @@ -292,11 +292,8 @@ class TeamShootingSplits(_TeamDashboard): def shot_type_summary(self): return _api_scrape(self.json, 5) - def shot_type_detail(self): - return _api_scrape(self.json, 6) - def assissted_by(self): - return _api_scrape(self.json, 7) + return _api_scrape(self.json, 6) class TeamPerformanceSplits(_TeamDashboard):
Fix index of teamdashboardbysplits #<I>
py
diff --git a/scapy/utils6.py b/scapy/utils6.py index <HASH>..<HASH> 100644 --- a/scapy/utils6.py +++ b/scapy/utils6.py @@ -252,9 +252,10 @@ def in6_addrtovendor(addr): """ Extract the MAC address from a modified EUI-64 constructed IPv6 address provided and use the IANA oui.txt file to get the vendor. - The database used for the conversion is the one loaded by Scapy, - based on Wireshark (/usr/share/wireshark/wireshark/manuf) None - is returned on error, "UNKNOWN" if the vendor is unknown. + The database used for the conversion is the one loaded by Scapy + from a Wireshark installation if discovered in a well-known + location. None is returned on error, "UNKNOWN" if the vendor is + unknown. """ mac = in6_addrtomac(addr) if mac is None or conf.manufdb is None:
utils6: Updates in6_addrtovendor docstring When loading the manuf database, Scapy will now attempt to discover where the database is located in the filesystem, not attempt to find it by the absolute path that was referenced previously in the docstring.
py
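For context on what the docstring describes, the modified EUI-64 extraction is mechanical: take the low 64 bits of the address, check for the embedded ff:fe marker, flip the universal/local bit, and drop the marker. A standalone sketch (simplified; Scapy's own `in6_addrtomac` handles more edge cases):

```python
import ipaddress

def eui64_to_mac(addr):
    iid = ipaddress.IPv6Address(addr).packed[8:]   # low 64 bits (interface ID)
    if iid[3:5] != b'\xff\xfe':                    # not derived from a MAC
        return None
    first = iid[0] ^ 0x02                          # flip universal/local bit
    mac = bytes([first]) + iid[1:3] + iid[5:]      # drop the ff:fe filler
    return ':'.join('%02x' % b for b in mac)

print(eui64_to_mac('fe80::0213:72ff:fe2c:681b'))   # 00:13:72:2c:68:1b
```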
diff --git a/pypsa/linopf.py b/pypsa/linopf.py index <HASH>..<HASH> 100644 --- a/pypsa/linopf.py +++ b/pypsa/linopf.py @@ -241,7 +241,7 @@ def define_nodal_balance_constraints(n, sns): lhs = (pd.concat([bus_injection(*args) for args in args], axis=1) .groupby(axis=1, level=0) .agg(lambda x: ''.join(x.values)) - .reindex(columns=n.buses.index)) + .reindex(columns=n.buses.index, fill_value='')) sense = '=' rhs = ((- n.loads_t.p_set.loc[sns] * n.loads.sign) .groupby(n.loads.bus, axis=1).sum()
linopf.py: use fill_value='' for buses without any components in the nodal balance constraint
py
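The fix matters because the per-bus constraint terms are strings that get concatenated; a NaN introduced by `reindex` for a bus with no attached components would poison that concatenation. A small pandas illustration of the difference:

```python
import pandas as pd

lhs = pd.DataFrame({'bus0': ['+1.0 x(gen0)']})

# Without fill_value, a bus that has no components becomes NaN:
print(lhs.reindex(columns=['bus0', 'bus1']))
# With fill_value='' it becomes an empty constraint string instead:
print(lhs.reindex(columns=['bus0', 'bus1'], fill_value=''))
```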
diff --git a/spyderlib/utils/codeanalysis.py b/spyderlib/utils/codeanalysis.py index <HASH>..<HASH> 100644 --- a/spyderlib/utils/codeanalysis.py +++ b/spyderlib/utils/codeanalysis.py @@ -69,10 +69,11 @@ def check_with_pyflakes(source_code, filename=None): w = Checker(tree, filename) w.messages.sort(key=lambda x: x.lineno) results = [] + coding = encoding.get_coding(source_code) lines = source_code.splitlines() for warning in w.messages: if 'analysis:ignore' not in \ - to_text_string(lines[warning.lineno-1]): + to_text_string(lines[warning.lineno-1], coding): results.append((warning.message % warning.message_args, warning.lineno)) return results
Correctly decode source code after flake check
py
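The underlying issue: `splitlines()` on still-encoded source yields byte strings, and decoding them with a default codec instead of the file's declared coding corrupts or rejects non-ASCII lines, so the `analysis:ignore` check could fail. A reduced illustration (standing in for `encoding.get_coding`/`to_text_string`):

```python
source = 'x = "héllo"  # analysis:ignore'.encode('utf-8')

coding = 'utf-8'                 # what encoding.get_coding would detect
line = source.splitlines()[0]    # a bytes object
print(line.decode(coding))       # decodes cleanly; 'analysis:ignore' is found
# line.decode('ascii')           # would raise UnicodeDecodeError on the é
```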
diff --git a/rest_mock_server/core/extractor.py b/rest_mock_server/core/extractor.py index <HASH>..<HASH> 100644 --- a/rest_mock_server/core/extractor.py +++ b/rest_mock_server/core/extractor.py @@ -9,13 +9,15 @@ Given a view:: class SomeView(APIView): """ - URL: /api/some/view/__key[id:int]?param1[int]&param2[str] + URL: /api/some/view/?param1[int]&param2[str] """ def get(self, request, *args, **kwargs): """ ``` { + "__key": "<id:int>", + "__key_position": "url", "__mockcount": 5, "data": "Hello, world", "id": "<sha256::10>"
Updated how keys should work in extractor docstring
py
diff --git a/pynnotator/install.py b/pynnotator/install.py index <HASH>..<HASH> 100755 --- a/pynnotator/install.py +++ b/pynnotator/install.py @@ -83,6 +83,7 @@ class Installer(object): self.install_snpeff() self.install_gemini() self.install_vep() + self.install_vcf_anno() def build_datasets(self): print("Building Datasets") @@ -222,6 +223,16 @@ class Installer(object): os.chdir('vep') command = 'bash {}/scripts/install_vep.sh'.format(settings.BASE_DIR) call(command, shell=True) + def install_vcf_anno(self): + + os.chdir(libs_dir) + if not os.path.exists('vcfanno'): + os.makedirs('vcfanno') + os.chdir('vcfanno') + command = 'wget https://github.com/brentp/vcfanno/releases/download/v0.2.9/vcfanno_linux64' + call(command, shell=True) + command = 'chmod +x vcfanno_linux64' + call(command, shell=True) # if not os.path.isfile('%s.zip' % (settings.vep_release)): # command = """
Add a vcfanno installation step to finally fix installation on Ubuntu
py
diff --git a/cdpybio/bedtools.py b/cdpybio/bedtools.py index <HASH>..<HASH> 100644 --- a/cdpybio/bedtools.py +++ b/cdpybio/bedtools.py @@ -1,3 +1,4 @@ +import copy import pandas as pd import pybedtools as pbt @@ -46,6 +47,7 @@ def beds_to_boolean(beds, ref=None, beds_sorted=False, ref_sorted=False, that overlaps each interval in the reference bed file. """ + beds = copy.deepcopy(beds) fns = [] for i,v in enumerate(beds): if type(v) == str: @@ -101,6 +103,7 @@ def combine(beds, beds_sorted=False, postmerge=True): New sorted BedTool with intervals from all input beds. """ + beds = copy.deepcopy(beds) for i,v in enumerate(beds): if type(v) == str: beds[i] = pbt.BedTool(v)
Make a deepcopy of the input bed file list to avoid mutating the caller's values.
py
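Both functions rebind elements of the caller's list in place (filename string becomes a BedTool), so without a copy the caller's own list is silently altered. A self-contained illustration of the mutation and the fix:

```python
import copy

def to_upper_inplace(items):
    for i, v in enumerate(items):
        items[i] = v.upper()      # rebinding through the caller's list
    return items

def to_upper_safe(items):
    items = copy.deepcopy(items)  # work on a private copy, as in the diff
    for i, v in enumerate(items):
        items[i] = v.upper()
    return items

names = ['a.bed', 'b.bed']
to_upper_safe(names)
print(names)     # ['a.bed', 'b.bed'] -- unchanged
to_upper_inplace(names)
print(names)     # ['A.BED', 'B.BED'] -- caller's list mutated
```

For a flat list of strings a shallow `list(beds)` would suffice; `copy.deepcopy` is the conservative choice when elements may themselves be mutable.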
diff --git a/furious/async.py b/furious/async.py index <HASH>..<HASH> 100644 --- a/furious/async.py +++ b/furious/async.py @@ -180,6 +180,12 @@ class Async(object): """job is stored as a (function path, args, kwargs) tuple.""" return self._options['job'] + @property + def recursion_depth(self): + """Get the current recursion depth.""" + recursion_options = self._options.get('_recursion') + return recursion_options.get('current', 0) + def _update_job(self, target, args, kwargs): """Specify the function this async job is to execute when run.""" target_path, options = get_function_path_and_options(target)
Add a property to get the current recursion depth
py
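One caveat in the new property: if `_options` has no `'_recursion'` key, `.get('_recursion')` returns None and the chained `.get('current', 0)` raises AttributeError. A defensive variant (not the committed code) defaults to an empty dict:

```python
options = {}   # no '_recursion' key yet

# The committed form would fail here:
#   options.get('_recursion').get('current', 0)   # AttributeError on None

# Defensive variant:
recursion_options = options.get('_recursion', {})
print(recursion_options.get('current', 0))        # 0
```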
diff --git a/lime/submodular_pick.py b/lime/submodular_pick.py index <HASH>..<HASH> 100644 --- a/lime/submodular_pick.py +++ b/lime/submodular_pick.py @@ -122,3 +122,4 @@ class SubmodularPick(object): remaining_indices -= {best_ind} self.sp_explanations = [self.explanations[i] for i in V] + self.V=V
Save picked indices; closes issue #<I>
py
diff --git a/airtest/core/android/adb.py b/airtest/core/android/adb.py index <HASH>..<HASH> 100644 --- a/airtest/core/android/adb.py +++ b/airtest/core/android/adb.py @@ -1535,7 +1535,8 @@ class ADB(object): m = packageRE.findall(ret) if m: return m[-1] - return "" + else: + return self.get_top_activity()[0] def cleanup_adb_forward():
Fix getting the top activity from `dumpsys window windows` output (cherry picked from commit c8f6e2fdaffc0d4b<I>b6b<I>ffadddaadb1c<I>fc)
py
diff --git a/azure/servicemanagement/servicemanagementservice.py b/azure/servicemanagement/servicemanagementservice.py
index <HASH>..<HASH> 100644
--- a/azure/servicemanagement/servicemanagementservice.py
+++ b/azure/servicemanagement/servicemanagementservice.py
@@ -1197,9 +1197,13 @@ class ServiceManagementService(_ServiceManagementClient):
         for a virtual machine.
     role_size:
         The size of the virtual machine to allocate. The default value is
-        Small. Possible values are: ExtraSmall, Small, Medium, Large,
-        ExtraLarge. The specified value must be compatible with the disk
-        selected in the OSVirtualHardDisk values.
+        Small. Possible values are: ExtraSmall,Small,Medium,Large,
+        ExtraLarge,A5,A6,A7,A8,A9,Basic_A0,Basic_A1,Basic_A2,Basic_A3,
+        Basic_A4,Standard_D1,Standard_D2,Standard_D3,Standard_D4,
+        Standard_D11,Standard_D12,Standard_D13,Standard_D14,Standard_G1,
+        Standard_G2,Standard_G3,Standard_G4,Standard_G5. The specified
+        value must be compatible with the disk selected in the
+        OSVirtualHardDisk values.
     role_type:
         The type of the role for the virtual machine. The only supported
         value is PersistentVMRole.
Update the list of possible role_size
py
diff --git a/openquake/hazardlib/tests/gsim/arroyo_2010_test.py b/openquake/hazardlib/tests/gsim/arroyo_2010_test.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/tests/gsim/arroyo_2010_test.py +++ b/openquake/hazardlib/tests/gsim/arroyo_2010_test.py @@ -19,6 +19,9 @@ from openquake.hazardlib.gsim.arroyo_2010 import ArroyoEtAl2010SInter from openquake.hazardlib.tests.gsim.utils import BaseGSIMTestCase +# The verification tables were generated using a Visual Basic code +# provided by Danny Arroyo. The format of the tables was adapted to +# the requirements of the OQ tests. class ArroyoEtAl2010SInterTestCase(BaseGSIMTestCase): """
Update arroyo_<I>_test.py: add a short note explaining how the verification tables were created.
py
diff --git a/src/python/pants/backend/codegen/tasks/wire_gen.py b/src/python/pants/backend/codegen/tasks/wire_gen.py index <HASH>..<HASH> 100644 --- a/src/python/pants/backend/codegen/tasks/wire_gen.py +++ b/src/python/pants/backend/codegen/tasks/wire_gen.py @@ -39,7 +39,6 @@ class WireGen(CodeGen, JvmToolTaskMixin): def __init__(self, *args, **kwargs): """Generates Java files from .proto files using the Wire protobuf compiler.""" super(WireGen, self).__init__(*args, **kwargs) - self.wire_version = self.context.config.get('wire-gen', 'version', default='1.6.0') self.java_out = os.path.join(self.workdir, 'gen-java') def resolve_deps(self, key, default=None):
Remove an unused config reference. If needed in the future, it can be added back as an option. Testing Done: CI passes: <URL>
py
diff --git a/trimesh/exchange/gltf.py b/trimesh/exchange/gltf.py index <HASH>..<HASH> 100644 --- a/trimesh/exchange/gltf.py +++ b/trimesh/exchange/gltf.py @@ -1140,14 +1140,14 @@ def _read_buffers(header, buffers, mesh_kwargs, merge_primitives=False, resolver else: name = "GLTF_geometry" - # By default the created mesh is not from primitive, in case it is the value will be updated + # By default the created mesh is not from primitive, + # in case it is the value will be updated kwargs['metadata']['from_gltf_primitive'] = False # each primitive gets it's own Trimesh object if len(m["primitives"]) > 1: kwargs['metadata']['from_gltf_primitive'] = True name += "_{}".format(j) - # make name unique across multiple meshes if name in meshes: name += "_{}".format(unique)
Making the code flake8 compliant
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,7 @@ configuration = { 'packages': ['hdbscan', 'hdbscan.tests'], 'install_requires': ['numpy', 'scikit-learn>=0.16', - 'cython >= 0.17'], + 'cython >= 0.26'], 'ext_modules': [_hdbscan_tree, _hdbscan_linkage, _hdbscan_boruvka,
Update cython install requirement
py
diff --git a/gwpy/tests/test_plotter.py b/gwpy/tests/test_plotter.py index <HASH>..<HASH> 100644 --- a/gwpy/tests/test_plotter.py +++ b/gwpy/tests/test_plotter.py @@ -81,14 +81,14 @@ class Mixin(object): fig = self.FIGURE_CLASS() return fig, fig.gca() - -class PlotTestCase(Mixin, unittest.TestCase): - """`TestCase` for the `gwpy.plotter` module - """ @property def use_tex(self): return rcParams['text.usetex'] + +class PlotTestCase(Mixin, unittest.TestCase): + """`TestCase` for the `gwpy.plotter` module + """ def test_init(self): # test object creation fig, ax = self.new()
tests: move use_tex decorator to mixin class
py
diff --git a/i3pystatus/core/desktop.py b/i3pystatus/core/desktop.py
index <HASH>..<HASH> 100644
--- a/i3pystatus/core/desktop.py
+++ b/i3pystatus/core/desktop.py
@@ -33,7 +33,7 @@ try:
     import gi
     gi.require_version('Notify', '0.7')
     from gi.repository import Notify
-except ImportError:
+except (ImportError, ValueError):
     pass
 else:
     if not Notify.init("i3pystatus"):
Fix crash on import with desktop notifications when python-gobject is available, but no notification daemon is running.
py
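A note on the spelling in the patched line: `except ImportError, ValueError:` is Python 2 syntax that binds the caught ImportError instance to the name `ValueError` and never catches ValueError itself; under Python 3 it is a SyntaxError outright. The tuple form catches both exception types, which matters here because `gi.require_version` raises ValueError when the typelib is unavailable:

```python
# Self-contained demonstration of the tuple form:
try:
    raise ValueError("Namespace Notify not available for version '0.7'")
except (ImportError, ValueError) as exc:
    print('caught:', exc)
```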
diff --git a/wandb/internal/file_pusher.py b/wandb/internal/file_pusher.py index <HASH>..<HASH> 100644 --- a/wandb/internal/file_pusher.py +++ b/wandb/internal/file_pusher.py @@ -106,6 +106,7 @@ class FilePusher(object): if os.path.getsize(path) == 0: return + save_name = wandb.util.to_forward_slash_path(save_name) event = step_checksum.RequestUpload(path, save_name, artifact_id, copy, use_prepare_flow, save_fn, digest) self._incoming_queue.put(event) @@ -130,4 +131,4 @@ class FilePusher(object): def is_alive(self): return (self._step_checksum.is_alive() - or self._step_upload.is_alive()) \ No newline at end of file + or self._step_upload.is_alive())
On windows, save files into bucket with proper unix-like paths (#<I>) * On windows, save files into bucket with proper unix-like paths * Restore early code saving? * revert changes to meta.py
py
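`wandb.util.to_forward_slash_path` is not shown in the diff; a plausible reduction of such a helper (the real implementation may differ) is plain separator normalisation, which keeps saved object keys identical across platforms:

```python
def to_forward_slash_path(path):
    """Hypothetical stand-in: normalise Windows separators to '/'."""
    return path.replace('\\', '/')

print(to_forward_slash_path('media\\images\\step_1.png'))  # media/images/step_1.png
```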