diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/openpnm/models/physics/meniscus.py b/openpnm/models/physics/meniscus.py index <HASH>..<HASH> 100644 --- a/openpnm/models/physics/meniscus.py +++ b/openpnm/models/physics/meniscus.py @@ -242,8 +242,8 @@ def general_toroidal(target, elif target_Pc is None: logger.exception(msg='Please supply a target capillary pressure' + ' when mode is "men"') - if np.abs(target_Pc) < 1.0: - target_Pc = 1.0 + if np.abs(target_Pc) < 1.0e-6: + target_Pc = 1.0e-6 # Find the position in-between the minima and maxima corresponding to # the target pressure inds = np.indices(np.shape(t_Pc))
Change threshold on meniscus pressure near zero
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -9,14 +9,21 @@ with open('README.rst') as readme_file: with open('HISTORY.rst') as history_file: history = history_file.read() -requirements = [] -with open('requirements.txt') as f: - for l in f.readlines(): - requirements.append(l.strip('\n')) +requirements = [ + 'cryptography', + 'hypothesis', + 'matplotlib', + 'networkx', + 'numpy', + 'palettable', + 'pandas', + 'pytest', + 'polcart', +] setup( name='nxviz', - version='0.2.10', + version='0.2.11', description="Graph Visualization Package", long_description=readme + '\n\n' + history, author="Eric J. Ma",
modified setup.py to have requirements curated
py
diff --git a/cherrypy/process/plugins.py b/cherrypy/process/plugins.py index <HASH>..<HASH> 100644 --- a/cherrypy/process/plugins.py +++ b/cherrypy/process/plugins.py @@ -277,6 +277,7 @@ class DropPrivileges(SimplePlugin): self.bus.log('Started as uid: %r gid: %r' % current_ids()) if self.gid is not None: os.setgid(self.gid) + os.setgroups([]) if self.uid is not None: os.setuid(self.uid) self.bus.log('Running as uid: %r gid: %r' % current_ids())
Fix for #<I> (Possible Security Issue: DropPrivileges Plugin Doesn't Remove Groups)
py
diff --git a/pyrogram/client/types/user_and_chats/chat.py b/pyrogram/client/types/user_and_chats/chat.py index <HASH>..<HASH> 100644 --- a/pyrogram/client/types/user_and_chats/chat.py +++ b/pyrogram/client/types/user_and_chats/chat.py @@ -252,7 +252,7 @@ class Chat(Object): if full_chat.id == c.id: chat = c - if isinstance(chat_full, types.ChannelFull): + if isinstance(full_chat, types.ChannelFull): if full_chat.linked_chat_id == c.id: linked_chat = c
Fix linked chat parsing There are two distinct ChatFull types using the same name (but different namespaces), their objects are kept in chat_full and full_chat.
py
diff --git a/distributions.py b/distributions.py index <HASH>..<HASH> 100644 --- a/distributions.py +++ b/distributions.py @@ -430,6 +430,7 @@ class GaussianNonConj(_GaussianBase, GibbsSampling): def __init__(self,mu_0,mu_sigma_0,kappa_0,sigma_sigma_0,mu=None,sigma=None): self._sigma_distn = GaussianFixedMean(mu_0,kappa_0,sigma_sigma_0,sigma) self._mu_distn = GaussianFixedCov(self._sigma_distn.sigma,mu_0,mu_sigma_0) + self.D = mu_0.shape[0] @property def mu(self):
added D member to GaussianNonConj
py
diff --git a/cassandra/metadata.py b/cassandra/metadata.py index <HASH>..<HASH> 100644 --- a/cassandra/metadata.py +++ b/cassandra/metadata.py @@ -790,7 +790,7 @@ class KeyspaceMetadata(object): self._drop_table_metadata(table_metadata.name) self.tables[table_metadata.name] = table_metadata - for index_name, index_metadata in table_metadata.indexes.iteritems(): + for index_name, index_metadata in six.iteritems(table_metadata.indexes): self.indexes[index_name] = index_metadata def _drop_table_metadata(self, table_name):
six.iteritems for newly added index dict
py
diff --git a/spock/plugins/helpers/physics.py b/spock/plugins/helpers/physics.py index <HASH>..<HASH> 100644 --- a/spock/plugins/helpers/physics.py +++ b/spock/plugins/helpers/physics.py @@ -25,6 +25,7 @@ from spock.vector import Vector3 logger = logging.getLogger('spock') +FP_MAGIC = 1e-4 class PhysicsCore(object): def __init__(self, vec, pos, bounding_box): @@ -106,11 +107,19 @@ class PhysicsPlugin(PluginBase): current_vector = Vector3() transform_vectors = [] q = collections.deque() + bail = False while all(transform_vectors) or not q: + if not q and bail: + logger.warn('Physics has failed to find an MTV, bailing out') + self.clear_velocity() + return Vector() current_vector = q.popleft() if q else current_vector + if current_vector.dist_sq() > self.vec.dist_sq() + FP_MAGIC: + continue transform_vectors = self.check_collision(pos, current_vector) for vector in transform_vectors: q.append(current_vector + vector) + bail = True possible_mtv = [current_vector] while q: current_vector = q.popleft()
Fix physics bugs when interacting with corners
py
diff --git a/salt/modules/test.py b/salt/modules/test.py index <HASH>..<HASH> 100644 --- a/salt/modules/test.py +++ b/salt/modules/test.py @@ -21,3 +21,12 @@ def ping(): salt '*' test.ping ''' return True + +def facter_data(): + ''' + Return the facter data + + CLI Example: + salt '*' test.facter_data + ''' + return facter
Add a test function to return facter data about a host
py
diff --git a/api/server.py b/api/server.py index <HASH>..<HASH> 100644 --- a/api/server.py +++ b/api/server.py @@ -120,14 +120,6 @@ def search_people(): def catch_all_get(path): API_URL = BASE_API_URL + '/' + path params = dict(request.args) - try: - if ("page" in params and len(params["page"]) > 0 and - int(params["page"][0]) >= 300): - return make_response( - jsonify({'error': - "no crawling the user list that high right now, sorry!"}), 403) - except: - pass return forwarded_get(API_URL, params = params) @app.route('/<path:path>', methods=['POST'])
no longer need that quick fix for pagination
py
diff --git a/yaspin/core.py b/yaspin/core.py index <HASH>..<HASH> 100644 --- a/yaspin/core.py +++ b/yaspin/core.py @@ -327,7 +327,6 @@ class Yaspin(object): # Wait time.sleep(self._interval) - sys.stdout.write("\b") def _compose_color_func(self): fn = functools.partial(
Remove \b from spin thread The extra \b written to stdout inside the spin thread leads on some terminals (e.g. rxvt) for some characters to be removed which are printed after the \b. This may lead to content written with write() to disappear shortly after writing.
py
diff --git a/ait/core/server/client.py b/ait/core/server/client.py index <HASH>..<HASH> 100644 --- a/ait/core/server/client.py +++ b/ait/core/server/client.py @@ -114,6 +114,7 @@ class PortOutputClient(ZMQInputClient): self.pub = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) def publish(self, msg): + msg = eval(msg) self.pub.sendto(msg, ('localhost', int(self.out_port))) log.debug('Published message from {}'.format(self))
Added eval to PortOutputClient publish
py
diff --git a/cassandra/io/asyncorereactor.py b/cassandra/io/asyncorereactor.py index <HASH>..<HASH> 100644 --- a/cassandra/io/asyncorereactor.py +++ b/cassandra/io/asyncorereactor.py @@ -278,6 +278,8 @@ class AsyncoreConnection(Connection, asyncore.dispatcher): @classmethod def handle_fork(cls): + global _dispatcher_map + _dispatcher_map = {} if cls._loop: cls._loop._cleanup() cls._loop = None
Fix asyncore re-initialization in case of a fork
py
diff --git a/flux_led/models_db.py b/flux_led/models_db.py index <HASH>..<HASH> 100755 --- a/flux_led/models_db.py +++ b/flux_led/models_db.py @@ -1155,8 +1155,9 @@ MODELS = [ ), LEDENETModel( model_num=0x97, # 0x97 + # AK001-ZJ210 = v2.28 # AK001-ZJ2146 = v3.11, 3.12 (has BLE) - models=["AK001-ZJ2134", "AK001-ZJ2146"], + models=["AK001-ZJ210", "AK001-ZJ2134", "AK001-ZJ2146"], description="Socket", # 1 channel always_writes_white_and_colors=False, # Formerly rgbwprotocol protocols=[MinVersionProtocol(0, PROTOCOL_LEDENET_SOCKET)],
Add old sockets to database (#<I>)
py
diff --git a/pyeapi/api/switchports.py b/pyeapi/api/switchports.py index <HASH>..<HASH> 100644 --- a/pyeapi/api/switchports.py +++ b/pyeapi/api/switchports.py @@ -100,7 +100,7 @@ class Switchports(EntityCollection): """ config = self.get_block('interface %s' % name) - if not re.match(r'\s{3}no\sswitchport', config, re.M): + if not re.search(r'\s{3}no\sswitchport$', config, re.M): resp = dict(name=name) resp['mode'] = MODE_RE.search(config, re.M).group('value') resp['access_vlan'] = \
fixes an issue where an interface would be identified as a switchport This commit fixes a problem where an interface would be wrongly identified as a switchport in the switchports api. This will lead to a traceback error with calling the get() method. This commit is necessary to fix the issue
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,20 +1,23 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages +from os import path -with open('README.md', 'r') as fh: - long_description = fh.read() + +this_directory = path.abspath(path.dirname(__file__)) +with open(path.join(this_directory, 'README.md')) as f: + long_description = f.read() setup( name='monkeylearn', - version='3.2.3', + version='3.2.4', author='MonkeyLearn', author_email='[email protected]', description='Official Python client for the MonkeyLearn API', long_description=long_description, long_description_content_type="text/markdown", url='https://github.com/monkeylearn/monkeylearn-python', - download_url='https://github.com/monkeylearn/monkeylearn-python/tarball/v3.2.3', + download_url='https://github.com/monkeylearn/monkeylearn-python/tarball/v3.2.4', keywords=['monkeylearn', 'machine learning', 'python'], classifiers=[ 'Development Status :: 5 - Production/Stable',
fix long_description with readme
py
diff --git a/nazs/web/forms.py b/nazs/web/forms.py index <HASH>..<HASH> 100644 --- a/nazs/web/forms.py +++ b/nazs/web/forms.py @@ -14,4 +14,4 @@ class ModelForm(ModelForm): instance = self.form_class.Meta.model.get() return self.form_class(form_data, instance=instance) else: - return super(ModelForm, self).get_form(*args, **kwargs) + return super(ModelForm, self).get_form(form_data, *args, **kwargs)
Fix form retrieval in ModelForm
py
diff --git a/scikits/audiolab/pysndfile/matapi.py b/scikits/audiolab/pysndfile/matapi.py index <HASH>..<HASH> 100644 --- a/scikits/audiolab/pysndfile/matapi.py +++ b/scikits/audiolab/pysndfile/matapi.py @@ -1,5 +1,5 @@ #! /usr/bin/env python -# Last Change: Sat Dec 06 10:00 PM 2008 J +# Last Change: Sat Dec 06 11:00 PM 2008 J # Copyright (C) 2006-2007 Cournapeau David <[email protected]> # @@ -23,7 +23,6 @@ import numpy as N from _sndfile import Format, Sndfile, available_file_formats, \ available_encodings, sndfile_version -from compat import PyaudioException, FlacUnsupported __all__ = [] _MATAPI_FORMAT = ['wav', 'aiff', 'au', 'sdif', 'flac', 'ogg'] @@ -80,8 +79,8 @@ def _reader_factory(name, filetype, descr): hdl = Sndfile(filename, 'read') try: if not hdl.format.file_format == filetype: - raise PyaudioException("%s is not a %s file (is %s)" \ - % (filename, filetype, hdl.format.file_format)) + raise ValueError, "%s is not a %s file (is %s)" \ + % (filename, filetype, hdl.format.file_format)) fs = hdl.samplerate enc = hdl.encoding
Raise ValueError when file type is not the expected one in matlab API reader.
py
diff --git a/cloudmesh/common/Shell.py b/cloudmesh/common/Shell.py index <HASH>..<HASH> 100755 --- a/cloudmesh/common/Shell.py +++ b/cloudmesh/common/Shell.py @@ -293,10 +293,11 @@ class Shell(object): v_string = [str(i) for i in python_version] + python_version_s = '.'.join(v_string) + if python_version[0] == 2: - print("You are running an unsupported version of python: {:}".format( - python_version_s)) + print("You are running an unsupported version of python: {:}".format(python_version_s)) # python_version_s = '.'.join(v_string) @@ -309,7 +310,6 @@ class Shell(object): elif python_version[0] == 3: - python_version_s = '.'.join(v_string) if (python_version[0] == 3) and (python_version[1] >= 7) and (python_version[2] >= 0): print("You are running a supported version of python: {:}".format(python_version_s))
remove python2 as supported
py
diff --git a/cumulusci/robotframework/Salesforce.py b/cumulusci/robotframework/Salesforce.py index <HASH>..<HASH> 100644 --- a/cumulusci/robotframework/Salesforce.py +++ b/cumulusci/robotframework/Salesforce.py @@ -242,7 +242,7 @@ class Salesforce(object): url += "?filterName={}".format(filter_name) self.selenium.go_to(url) - def go_to_record_home(self, obj_name, obj_id, filter_name=None): + def go_to_record_home(self, obj_name, obj_id): """ Navigates to the Home view of a Salesforce Object """ url = self.cumulusci.org.lightning_base_url url = "{}/lightning/r/{}/{}/view".format(url, obj_name, obj_id)
Remove filter_name kwarg from Go To Record Home
py
diff --git a/editor/editor.py b/editor/editor.py index <HASH>..<HASH> 100644 --- a/editor/editor.py +++ b/editor/editor.py @@ -973,8 +973,7 @@ class Editor(App): def move_widget(self, css_key, value): # css_key can be 'top' or 'left' # value (int): positive or negative value - if issubclass(self.selectedWidget.__class__, gui.Widget) and css_key in self.selectedWidget.style and \ - self.selectedWidget.css_position=='absolute': + if issubclass(self.selectedWidget.__class__, gui.Widget) and css_key in self.selectedWidget.style: self.selectedWidget.style[css_key] = gui.to_pix(gui.from_pix(self.selectedWidget.style[css_key]) + value) def onkeydown(self, emitter, key, keycode, ctrl, shift, alt):
Editor: resize and move shortcuts not limited with absolute positioning.
py
diff --git a/src/rinoh/font/google.py b/src/rinoh/font/google.py index <HASH>..<HASH> 100644 --- a/src/rinoh/font/google.py +++ b/src/rinoh/font/google.py @@ -81,7 +81,7 @@ def try_install_family(name, family_path): if download_path: print(" unpacking...", end='') family_path.mkdir(parents=True, exist_ok=True) - unpack_archive(download_path, family_path) + unpack_archive(str(download_path), str(family_path)) print(" done!") return True print("-> not found: please check the typeface name (case-sensitive!)")
try_install_family: fix Python <I> & <I> compatibility
py
diff --git a/pycdlib/pycdlib.py b/pycdlib/pycdlib.py index <HASH>..<HASH> 100644 --- a/pycdlib/pycdlib.py +++ b/pycdlib/pycdlib.py @@ -2467,7 +2467,7 @@ class PyCdlib(object): fp = open(filename, 'r+b') self.managing_fp = True try: - self.open_fp(fp) + self._open_fp(fp) except: fp.close() raise
Make open() call the internal _open_fp instead of the external one. This is ever so slightly faster.
py
diff --git a/src/pydirectory/Directory.py b/src/pydirectory/Directory.py index <HASH>..<HASH> 100644 --- a/src/pydirectory/Directory.py +++ b/src/pydirectory/Directory.py @@ -98,13 +98,11 @@ class Directory(object): " an autoDelete directory") return tempfile.mkdtemp(dir=self.path, prefix=".") - def listFilesWIN(self): + def listFilesWin(self): output = [] for dirname, dirnames, filenames in os.walk(self.path): - # print path to all subdirectories first. for subdirname in dirnames: output.append(os.path.join(dirname, subdirname)) - # print path to all filenames. for filename in filenames: output.append(os.path.join(dirname, filename)) return output @@ -118,7 +116,7 @@ class Directory(object): def scan(self): self._files = {} if system() is "Windows": - output = self.listFilesWIN() + output = self.listFilesWin() else: output = self.listFilesLinux() output = [line for line in output if "__MACOSX" not in line]
corrected listFilesWin naming convention
py
diff --git a/fleece/connexion.py b/fleece/connexion.py index <HASH>..<HASH> 100644 --- a/fleece/connexion.py +++ b/fleece/connexion.py @@ -38,14 +38,14 @@ class FleeceApp(connexion.App): If `logger` is None, a default logger object will be created. """ - super(FleeceApp, self).__init__(*args, **kwargs) - logger = kwargs.pop('logger', None) if logger is None: self.logger = fleece.log.get_logger(__name__) else: self.logger = logger + super(FleeceApp, self).__init__(*args, **kwargs) + def call_api(self, event): """Make a request against the API defined by this app.
FleeceApp: don't pass `logger` kwarg to super constructor If you specify an explicit `logger` kwarg, the call to the super constructor (connexion.App) will blow up.
py
diff --git a/twitter_scraper/modules/tweets.py b/twitter_scraper/modules/tweets.py index <HASH>..<HASH> 100644 --- a/twitter_scraper/modules/tweets.py +++ b/twitter_scraper/modules/tweets.py @@ -103,7 +103,8 @@ def get_tweets(query, pages=25): for style in styles: if style.startswith('background'): tmp = style.split('/')[-1] - video_id = tmp[:tmp.index('.jpg')] + video_id = tmp[:tmp.index('.jpg')] if 'jpg' in tmp \ + else tmp[:tmp.index('.png')] if 'png' in tmp else None videos.append({'id': video_id}) tweets.append({
fix video_id substring not found
py
diff --git a/userena/models.py b/userena/models.py index <HASH>..<HASH> 100644 --- a/userena/models.py +++ b/userena/models.py @@ -125,18 +125,18 @@ class UserenaSignup(models.Model): 'site': Site.objects.get_current()} - # Email to the old address + # Email to the old address, if present subject_old = render_to_string('userena/emails/confirmation_email_subject_old.txt', context) subject_old = ''.join(subject_old.splitlines()) message_old = render_to_string('userena/emails/confirmation_email_message_old.txt', context) - - send_mail(subject_old, - message_old, - settings.DEFAULT_FROM_EMAIL, - [self.user.email]) + if self.user.email: + send_mail(subject_old, + message_old, + settings.DEFAULT_FROM_EMAIL, + [self.user.email]) # Email to the new address subject_new = render_to_string('userena/emails/confirmation_email_subject_new.txt',
added if to sending mail to old email
py
diff --git a/nipap/nipap/xmlrpc.py b/nipap/nipap/xmlrpc.py index <HASH>..<HASH> 100644 --- a/nipap/nipap/xmlrpc.py +++ b/nipap/nipap/xmlrpc.py @@ -28,6 +28,9 @@ class NipapXMLRPC: self._cfg = NipapConfig() self.cfg_file = None + # Add dispatch entry for <ex:nil/> + xmlrpclib.Unmarshaller.dispatch["ex:nil"] = xmlrpclib.Unmarshaller.end_nil + self.init()
Added handling of <ex:nil/> to xmlrpc-class As the Apache JAVA XML-RPC client encodes null values as <ex:nil/> instead of <nil/> as xmlrpclib expects, xmlrpclib is now slighly modified runtime to also handle the ex:nil-values.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ setup( include_package_data=True, # Package dependencies. - install_requires=['simplejson', 'requests'], + install_requires=['simplejson', 'requests>=0.13.0'], # Metadata for PyPI. author='Ryan McGrath',
Need to at least have requests <I>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ if sys.version_info < (3, 0): setup(name='picotui', - version='1.1.1', + version='1.1.2', description="""A simple text user interface (TUI) library.""", long_description=open('README.rst').read(), url='https://github.com/pfalcon/picotui',
setup.py: Release <I>.
py
diff --git a/bitex/interfaces/rocktrading.py b/bitex/interfaces/rocktrading.py index <HASH>..<HASH> 100644 --- a/bitex/interfaces/rocktrading.py +++ b/bitex/interfaces/rocktrading.py @@ -36,17 +36,17 @@ class RockTradingLtd(RockTradingREST): @return_json(None) def tickers(self, currency=None): if currency: - return self.public_query('tickers/%s' % currency) + return self.public_query('funds/%s/ticker' % currency) else: return self.public_query('tickers') @return_json(None) def order_book(self, pair): - return self.public_query('orderbook/%s' % pair) + return self.public_query('funds/%s/orderbook' % pair) @return_json(None) def trades(self, pair): - return self.public_query('trades/%s' % pair) + return self.public_query('funds/%s/trades' % pair) @return_json(None) def balance(self):
fixed url for market api endpoints
py
diff --git a/bootstrap3/renderers.py b/bootstrap3/renderers.py index <HASH>..<HASH> 100644 --- a/bootstrap3/renderers.py +++ b/bootstrap3/renderers.py @@ -214,7 +214,7 @@ class FieldRenderer(object): 'help_text_and_errors': help_text_and_errors, 'layout': self.layout, })) - html += '<span class=help-block>{help}</span>'.format(help=help_html) + html += '<span class="help-block">{help}</span>'.format(help=help_html) return html def get_field_class(self):
Added quotes around class (fixes #<I>)
py
diff --git a/ca/django_ca/models.py b/ca/django_ca/models.py index <HASH>..<HASH> 100644 --- a/ca/django_ca/models.py +++ b/ca/django_ca/models.py @@ -535,7 +535,9 @@ class CertificateAuthority(X509CertMixin): @property def key_exists(self): - if os.path.isabs(self.private_key_path): + if self._key is not None: + return True + elif os.path.isabs(self.private_key_path): log.warning('%s: CA uses absolute path. Use "manage.py migrate_ca" to update.', self.serial) return os.path.exists(self.private_key_path) else:
do not check path if key is already loaded
py
diff --git a/python/ray/worker.py b/python/ray/worker.py index <HASH>..<HASH> 100644 --- a/python/ray/worker.py +++ b/python/ray/worker.py @@ -672,11 +672,11 @@ def init( _system_config (dict): Configuration for overriding RayConfig defaults. For testing purposes ONLY. _tracing_startup_hook (str): If provided, turns on and sets up tracing - for Ray. Must be the name of a function that takes no arguments and - sets up a Tracer Provider, Remote Span Processors, and - (optional) additional instruments. See more at - docs.ray.io/tracing.html. It is currently under active development, - and the API is subject to change. + for Ray. Must be the name of a function that takes no arguments and + sets up a Tracer Provider, Remote Span Processors, and + (optional) additional instruments. See more at + docs.ray.io/tracing.html. It is currently under active development, + and the API is subject to change. Returns: Address information about the started processes.
[docs] Update API docs for ray.init (#<I>) The incorrect indentation caused the docs render weirdly: <URL>
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,7 @@ setup( name='soupsieve', version=VER, keywords='CSS HTML XML selector filter query soup', - description='A CSS4 selector implementation for Beautiful Soup.', + description='A modern CSS selector implementation for Beautiful Soup.', long_description=get_description(), long_description_content_type='text/markdown', author='Isaac Muse',
Remove mention of CSS4 in PyPI short description
py
diff --git a/openquake/calculators/classical.py b/openquake/calculators/classical.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/classical.py +++ b/openquake/calculators/classical.py @@ -132,6 +132,7 @@ class ClassicalCalculator(base.HazardCalculator): ires = parallel.Starmap( self.core_task.__func__, iterargs, self.monitor() ).submit_all() + self.csm.sources_by_trt.clear() # save memory self.nsites = [] acc = ires.reduce(self.agg_dicts, self.zerodict()) if not self.nsites:
Saved memory in classical [skip hazardlib][demos] Former-commit-id: cecc<I>dc<I>b<I>abe<I>b<I>ece8a3dfb
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,7 @@ setup( 'MAGICC6/MAGICC6_4Download/*.SCEN' ]}, include_package_data=True, - install_requires=['pandas', 'f90nml==0.21'], + install_requires=['pandas', 'f90nml'], tests_require=['pytest'], cmdclass=cmdclass )
Unpin `f<I>nml` New version <I> which includes bug fix for #7 was released. Closes #7.
py
diff --git a/admin_tools/dashboard/modules.py b/admin_tools/dashboard/modules.py index <HASH>..<HASH> 100644 --- a/admin_tools/dashboard/modules.py +++ b/admin_tools/dashboard/modules.py @@ -165,7 +165,7 @@ class Group(DashboardModule): Represents a group of modules, the group can be displayed in tabs, accordion, or just stacked (default). As well as the :class:`~admin_tools.dashboard.modules.DashboardModule` - properties, the :class:`~admin_tools.dashboard.modules.AppList` + properties, the :class:`~admin_tools.dashboard.modules.Group` has one extra property: ``display``
Fixed typo in Group module docstring.
py
diff --git a/src/data.py b/src/data.py index <HASH>..<HASH> 100644 --- a/src/data.py +++ b/src/data.py @@ -154,6 +154,18 @@ class Dataset(object): return fobj.getvalue() if __name__ == "__main__": + """ + For now this regression test assumes you've downloaded a sample + netCDF file and placed it in scidata/test/ + + Heres one way to get going: + + mkdir test + cd test + wget http://www.unidata.ucar.edu/software/netcdf/examples/ECMWF_ERA-40_subset.nc + + """ + base_dir = os.path.dirname(__file__) test_dir = os.path.join(base_dir, '..', 'test', ) write_test_path = os.path.join(test_dir, 'test_output.nc')
Added directions for downloading test data.
py
diff --git a/salt/states/libvirt.py b/salt/states/libvirt.py index <HASH>..<HASH> 100644 --- a/salt/states/libvirt.py +++ b/salt/states/libvirt.py @@ -2,6 +2,10 @@ Manage libvirt certs ''' +# Import python libs +import os + + def keys(name, basepath='/etc/pki'): ''' Manage libvirt keys @@ -19,7 +23,7 @@ def keys(name, basepath='/etc/pki'): pillar = __salt__['pillar.ext']({'libvirt': '_'}) paths = { 'serverkey': os.path.join( - basebath, + basepath, 'libvirt', 'private', 'serverkey.pem'), @@ -44,8 +48,8 @@ def keys(name, basepath='/etc/pki'): p_key = 'libvirt.{0}.pem'.format(key) if not p_key in pillar: continue - if not os.path.isdir(os.path.basename(paths[key])): - os.makedirs(os.path.basename(paths[key])) + if not os.path.isdir(os.path.dirname(paths[key])): + os.makedirs(os.path.dirname(paths[key])) if os.path.isfile(paths[key]): with open(paths[key], 'r') as fp_: if not fp_.read() == pillar[p_key]:
libvirt state is fully armed
py
diff --git a/devassistant/gui/run_window.py b/devassistant/gui/run_window.py index <HASH>..<HASH> 100644 --- a/devassistant/gui/run_window.py +++ b/devassistant/gui/run_window.py @@ -63,7 +63,7 @@ class RunLoggingHandler(logging.Handler): self.parent.debug_logs['logs'].append(record) # During execution if level is bigger then DEBUG # then GUI shows the message. - if int(record.levelno) > 10: + if int(record.levelno) > 10 or self.parent.debugging: event_type = getattr(record, 'event_type', '') if event_type: if event_type == 'dep_installation_start': @@ -169,7 +169,6 @@ class RunWindow(object): self.close_win = False def disable_buttons(self): - self.debug_btn.set_sensitive(False) self.main_btn.set_sensitive(False) self.back_btn.hide() self.info_label.set_label('<span color="#FFA500">In progress...</span>')
Merged pull request #<I> from jkoncick/master + resolve conflicts
py
diff --git a/openquake/baselib/parallel.py b/openquake/baselib/parallel.py index <HASH>..<HASH> 100644 --- a/openquake/baselib/parallel.py +++ b/openquake/baselib/parallel.py @@ -359,12 +359,18 @@ def safely_call(func, args): # Check is done anyway in other parts of the code # further investigation is needed # check_mem_usage(mon) # check if too much memory is used - backurl = getattr(mon, 'backurl', None) - zsocket = (Socket(backurl, zmq.PUSH, 'connect') if backurl - else mock.MagicMock()) # do nothing - with zsocket: - zsocket.send(res) - return zsocket.num_sent if backurl else res + try: + backurl = mon.backurl + except AttributeError: + return res + with Socket(backurl, zmq.PUSH, 'connect') as zsocket: + try: + zsocket.send(res) + except Exception: # like OverflowError + _etype, exc, tb = sys.exc_info() + err = Result(exc, mon, ''.join(traceback.format_tb(tb))) + zsocket.send(err) + return zsocket.num_sent if OQ_DISTRIBUTE.startswith('celery'):
Fixed OverflowError in zsocket.send [demos]
py
diff --git a/cassandra/cluster.py b/cassandra/cluster.py index <HASH>..<HASH> 100644 --- a/cassandra/cluster.py +++ b/cassandra/cluster.py @@ -629,7 +629,7 @@ class Session(object): conn_exc = ConnectionException(str(auth_exc), host=host) host.monitor.signal_connection_failure(conn_exc) return self._pools.get(host) - except ConnectionException, conn_exc: + except Exception, conn_exc: host.monitor.signal_connection_failure(conn_exc) return self._pools.get(host)
Tolerate any kind of Exception when creating host pools
py
diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index <HASH>..<HASH> 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -169,14 +169,25 @@ class SSH(object): self.targets = self.roster.targets( self.opts['tgt'], self.tgt_type) - priv = self.opts.get( - 'ssh_priv', - os.path.join( - self.opts['pki_dir'], - 'ssh', - 'salt-ssh.rsa' + # If we're in a wfunc, we need to get the ssh key location from the + # top level opts, stored in __master_opts__ + if '__master_opts__' in self.opts: + priv = self.opts['__master_opts__'].get( + 'ssh_priv', + os.path.join( + self.opts['__master_opts__']['pki_dir'], + 'ssh', + 'salt-ssh.rsa' + ) ) - ) + else: + priv = self.opts.get( + 'ssh_priv', + os.path.join( + self.opts['pki_dir'], + 'ssh', + 'salt-ssh.rsa' + ) if not os.path.isfile(priv): try: salt.client.ssh.shell.gen_key(priv)
Get the ssh_priv from __master_opts__ if available
py
diff --git a/revrand/glm.py b/revrand/glm.py index <HASH>..<HASH> 100644 --- a/revrand/glm.py +++ b/revrand/glm.py @@ -392,7 +392,7 @@ def predict_interval(alpha, Xs, likelihood, basis, m, C, lparams, bparams, if multiproc: pool = Pool() - res = pool.starmap(_rootfinding, work) + res = pool.map(_star_rootfinding, work) pool.close() pool.join() else: @@ -406,6 +406,12 @@ def predict_interval(alpha, Xs, likelihood, basis, m, C, lparams, bparams, # Internal Module Utilities # +# For python 2.7 compatibility +def _star_rootfinding(args): + + return _rootfinding(*args) + + def _rootfinding(fn, likelihood, lparams, alpha): # CDF minus percentile for quantile root finding
python 2 pool.starmap compatability fix
py
diff --git a/cgroupspy/__init__.py b/cgroupspy/__init__.py index <HASH>..<HASH> 100644 --- a/cgroupspy/__init__.py +++ b/cgroupspy/__init__.py @@ -24,4 +24,4 @@ ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ -__version__ = "0.1.4" +__version__ = "0.1.5"
Bump the version <I>
py
diff --git a/scoop/__main__.py b/scoop/__main__.py index <HASH>..<HASH> 100644 --- a/scoop/__main__.py +++ b/scoop/__main__.py @@ -178,8 +178,8 @@ class ScoopApp(object): first_worker = (worker == self.worker_hosts[-1][0]) self.log.info(' {0}:\t{1} {2}'.format( worker, - number - 1 if first_worker or not headless else str(number), - "+ origin" if first_worker or not headless else "", + number - 1 if first_worker or headless else str(number), + "+ origin" if first_worker or headless else "", ) )
Fixed the origin error in the distribution reporting
py
diff --git a/dp_tornado/engine/cache.py b/dp_tornado/engine/cache.py index <HASH>..<HASH> 100644 --- a/dp_tornado/engine/cache.py +++ b/dp_tornado/engine/cache.py @@ -570,6 +570,8 @@ class Decorator(object): if not identifier: identifier = _engine_.helper.datetime.mtime() self._cache(identifier_key, identifier) + else: + identifier = identifier['val'] return identifier
fixed identifier duplicated issue.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -26,7 +26,7 @@ install_requires = [ 'django-libsass', 'django-mptt==0.8.6', 'djangorestframework==3.4.0', - 'django-phonenumber-field', + 'django-phonenumber-field==1.3.0', 'django-import-export', 'django-daterange-filter', 'elastic-git',
Pin django-phonenumber-field to <I> Version 2 was released about one day ago which drops support for the version of Django that we're using. We need to pin the version to ensure we don't get a conflict.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import setup def readme(): - with open('README.rst', 'r') as f: + with open('README.rst', 'r', encoding='utf-8') as f: return f.read()
update readme() The original line "with open('README.rst', 'r') as f:" caused "UnicodeDecodeError" on Windows <I>. Specify the encoding, "with open('README.rst', 'r', encoding='utf-8') as f:", solved the problem.
py
diff --git a/arthur/_version.py b/arthur/_version.py index <HASH>..<HASH> 100644 --- a/arthur/_version.py +++ b/arthur/_version.py @@ -1,2 +1,2 @@ # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440 -__version__ = "0.1.7" +__version__ = "0.1.8"
Update version number to <I>
py
diff --git a/tests/expectations/test_run_diagnostics_supporting_methods.py b/tests/expectations/test_run_diagnostics_supporting_methods.py index <HASH>..<HASH> 100644 --- a/tests/expectations/test_run_diagnostics_supporting_methods.py +++ b/tests/expectations/test_run_diagnostics_supporting_methods.py @@ -298,7 +298,6 @@ def test__get_test_results(): SparkDFExecutionEngine=False, ), ) - assert len(test_results) >= 6 for result in test_results: # Abe: 1/1/2022: I'm not sure this is the behavior we want long term. How does backend relate to ExecutionEngine? if result.backend == "pandas":
Remove assertion (#<I>)
py
diff --git a/spotifyconnect/metadata.py b/spotifyconnect/metadata.py index <HASH>..<HASH> 100644 --- a/spotifyconnect/metadata.py +++ b/spotifyconnect/metadata.py @@ -17,8 +17,8 @@ class Metadata(object): def __init__(self, sp_metadata): - self.playlist_name = utils.to_unicode(sp_metadata.data0) - self.playlist_uri = utils.to_unicode(sp_metadata.context_uri) + self.playlist_name = utils.to_unicode(sp_metadata.playlist_name) + self.playlist_uri = utils.to_unicode(sp_metadata.playlist_uri) self.track_name = utils.to_unicode(sp_metadata.track_name) self.track_uri = utils.to_unicode(sp_metadata.track_uri) self.artist_name = utils.to_unicode(sp_metadata.artist_name) @@ -30,7 +30,7 @@ class Metadata(object): def __repr__(self): - return 'Metadata(%r)' % self.track_uri + return 'Metadata(%s)' % self.track_uri @serialized def get_image_url(self, image_size):
Fix Metadata Fixed C metadata object properties for playlists. Fix representation
py
diff --git a/example/ssd/train.py b/example/ssd/train.py index <HASH>..<HASH> 100644 --- a/example/ssd/train.py +++ b/example/ssd/train.py @@ -72,7 +72,7 @@ def parse_args(): help='blue mean value') parser.add_argument('--lr-steps', dest='lr_refactor_step', type=str, default='80, 160', help='refactor learning rate at specified epochs') - parser.add_argument('--lr-factor', dest='lr_refactor_ratio', type=str, default=0.1, + parser.add_argument('--lr-factor', dest='lr_refactor_ratio', type=float, default=0.1, help='ratio to refactor learning rate') parser.add_argument('--freeze', dest='freeze_pattern', type=str, default="^(conv1_|conv2_).*", help='freeze layer pattern')
Change type of 'lr_refactor_ratio' to 'float' or else the learning-rate-refactor will never work (#<I>)
py
diff --git a/yelp_kafka_tool/kafka_consumer_manager/commands/offset_set.py b/yelp_kafka_tool/kafka_consumer_manager/commands/offset_set.py index <HASH>..<HASH> 100644 --- a/yelp_kafka_tool/kafka_consumer_manager/commands/offset_set.py +++ b/yelp_kafka_tool/kafka_consumer_manager/commands/offset_set.py @@ -51,7 +51,7 @@ class OffsetSet(OffsetWriter): "new offset." ) parser_offset_set.add_argument( - '--storage', + '--storage', choices=['zookeeper', 'kafka'], help="String describing where to store the committed offsets." )
KAFKA-<I>: Add choices for storage options to command line arg
py
diff --git a/tests/classifiers.py b/tests/classifiers.py index <HASH>..<HASH> 100644 --- a/tests/classifiers.py +++ b/tests/classifiers.py @@ -41,7 +41,7 @@ def optimize_final_model_classification(model_name=None): lower_bound = -0.215 if model_name == 'DeepLearningClassifier': - lower_bound = -0.25 + lower_bound = -0.24 assert lower_bound < test_score < -0.17 @@ -425,7 +425,7 @@ def feature_learning_categorical_ensembling_getting_single_predictions_classific lower_bound = -0.215 if model_name == 'DeepLearningClassifier': - lower_bound = -0.25 + lower_bound = -0.23 if model_name == 'GradientBoostingClassifier' or model_name is None: lower_bound = -0.25 if model_name == 'LGBMClassifier':
makes some tests harder for improved deep learning
py
diff --git a/filer/admin/folderadmin.py b/filer/admin/folderadmin.py index <HASH>..<HASH> 100644 --- a/filer/admin/folderadmin.py +++ b/filer/admin/folderadmin.py @@ -208,6 +208,7 @@ class FolderAdmin(PrimitivePermissionAwareModelAdmin): Folder.objects.get(id=last_folder_id) except Folder.DoesNotExist: url = reverse('admin:filer-directory_listing-root') + url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request,"&")) else: url = reverse('admin:filer-directory_listing', kwargs={'folder_id': last_folder_id}) url = "%s%s%s" % (url, popup_param(request), selectfolder_param(request,"&"))
Fix popup mode when Folder doesn't exists I noticed this bug in django_cms image plugin when trying to select an image for the first time. If the last visited folder was the root folder or the unfiled folder, the popup mode doesn't trigger. It's fixed with this simple patch ;) cheers !
py
diff --git a/salt/queues/sqlite_queue.py b/salt/queues/sqlite_queue.py index <HASH>..<HASH> 100644 --- a/salt/queues/sqlite_queue.py +++ b/salt/queues/sqlite_queue.py @@ -37,7 +37,7 @@ def _conn(queue): ''' Return an sqlite connection ''' - queue_dir = __opts__['queue_dir'] + queue_dir = __opts__['sqlite_queue_dir'] db = os.path.join(queue_dir, '{0}.db'.format(queue)) log.debug('Connecting to: {0}'.format(db)) @@ -86,7 +86,7 @@ def _list_queues(): ''' Return a list of sqlite databases in the queue_dir ''' - queue_dir = __opts__['queue_dir'] + queue_dir = __opts__['sqlite_queue_dir'] files = os.path.join(queue_dir, '*.db') paths = glob.glob(files) queues = [os.path.splitext(os.path.basename(item))[0] for item in paths]
Fix the option for sqlite_queue_dir
py
diff --git a/cleo/descriptors/application_description.py b/cleo/descriptors/application_description.py index <HASH>..<HASH> 100644 --- a/cleo/descriptors/application_description.py +++ b/cleo/descriptors/application_description.py @@ -45,7 +45,7 @@ class ApplicationDescription(object): names = [] for name, command in commands: - if not command.get_name(): + if not command.get_name() or command.get_name() == '_completion': continue if command.get_name() == name:
Hides _completion command from list.
py
diff --git a/microcosm/loaders.py b/microcosm/loaders.py index <HASH>..<HASH> 100644 --- a/microcosm/loaders.py +++ b/microcosm/loaders.py @@ -62,7 +62,7 @@ def load_from_python_file(metadata): """ def load_python_module(data): module = new_module("magic") - exec data in module.__dict__, module.__dict__ + exec(data, module.__dict__, module.__dict__) return { key: value for key, value in module.__dict__.items()
updated exec statement usage to be python-3 compatible
py
diff --git a/openquake/engine/calculators/risk/event_based/core.py b/openquake/engine/calculators/risk/event_based/core.py index <HASH>..<HASH> 100644 --- a/openquake/engine/calculators/risk/event_based/core.py +++ b/openquake/engine/calculators/risk/event_based/core.py @@ -87,8 +87,8 @@ def event_based(job_id, hazard, hazard_getter, _ = hazard_data (loss_curve_id, loss_map_ids, - mean_loss_curve_id, quantile_loss_curve_ids, - insured_curve_id, aggregate_loss_curve_id) = ( + insured_curve_id, aggregate_loss_curve_id, + mean_loss_curve_id, quantile_loss_curve_ids) = ( output_containers[hazard_output_id]) seed = rnd.randint(0, models.MAX_SINT_32)
calcs/risk/event_based/core: In `output_containers`, mean and quantiles now come at the end.
py
diff --git a/python-package/xgboost/core.py b/python-package/xgboost/core.py index <HASH>..<HASH> 100644 --- a/python-package/xgboost/core.py +++ b/python-package/xgboost/core.py @@ -1448,8 +1448,7 @@ class Booster(object): importance_type: str, default 'weight' One of the importance types defined above. """ - - if getattr(self, 'booster', None) is not None and self.booster != 'gbtree': + if getattr(self, 'booster', None) is not None and self.booster not in {'gbtree', 'dart'}: raise ValueError('Feature importance is not defined for Booster type {}' .format(self.booster))
Check booster for dart in feature importance. (#<I>) * Check booster for dart in feature importance.
py
diff --git a/bcbio/variation/varscan.py b/bcbio/variation/varscan.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/varscan.py +++ b/bcbio/variation/varscan.py @@ -51,7 +51,7 @@ def _get_jvm_opts(config, tmp_dir): def _varscan_options_from_config(config): """Retrieve additional options for VarScan from the configuration. """ - opts = ["--min-coverage 5", "--p-value 0.98"] + opts = ["--min-coverage 5", "--p-value 0.98", "--strand-filter 1"] resources = config_utils.get_resources("varscan", config) if resources.get("options"): opts += resources["options"] @@ -135,7 +135,7 @@ def _varscan_paired(align_bams, ref_file, items, target_regions, out_file): " <({normal_mpileup_cl} | {remove_zerocoverage}) " "<({tumor_mpileup_cl} | {remove_zerocoverage}) " "--output-snp {tx_snp} --output-indel {tx_indel} " - " --output-vcf --strand-filter 1 {opts} ") + " --output-vcf {opts} ") # add minimum AF min_af = float(utils.get_in(paired.tumor_config, ("algorithm", "min_allele_fraction"), 10)) / 100.0
Add --strand-filter for single sample calling to match the paired case; centralize setting.
py
diff --git a/phy/plot/tests/test_features.py b/phy/plot/tests/test_features.py index <HASH>..<HASH> 100644 --- a/phy/plot/tests/test_features.py +++ b/phy/plot/tests/test_features.py @@ -43,9 +43,10 @@ def _test_features(n_spikes=None, n_clusters=None): # masks[n_spikes//2:, ...] = 0 c.visual.masks = masks c.add_extra_feature('time', spike_samples) + c.add_extra_feature('test', np.sin(np.linspace(-10., 10., n_spikes))) matrix = np.empty((2, 2), dtype=object) matrix[...] = [[('time', (0, 0)), ((1, 0), (1, 1))], - [((2, 1), (1, 0)), ((1, 0), 'time')]] + [((2, 1), (1, 0)), ('time', 'test')]] c.dimensions_matrix = matrix c.visual.spike_clusters = spike_clusters c.visual.cluster_colors = np.array([_random_color()
Add test extra feature in feature view test.
py
diff --git a/wptools/core.py b/wptools/core.py index <HASH>..<HASH> 100644 --- a/wptools/core.py +++ b/wptools/core.py @@ -55,7 +55,7 @@ class WPTools: self.get_random() else: self.show() - self.verbose = verbose + self._verbose = verbose def __get_links(self, iwlinks): """ @@ -191,7 +191,7 @@ class WPTools: """ snip and base href lead HTML """ - snip = utils.snip_html(html, verbose=1 if self.verbose else 0) + snip = utils.snip_html(html, verbose=1 if self._verbose else 0) snip = "<p snipped>%s</p>" % snip url = urlparse.urlparse(self.g_rest['query'])
stow verbosity in core
py
diff --git a/pyee/__init__.py b/pyee/__init__.py index <HASH>..<HASH> 100644 --- a/pyee/__init__.py +++ b/pyee/__init__.py @@ -62,11 +62,14 @@ class EventEmitter(object): """ self._events[event].remove(f) - def remove_all_listeners(self, event): + def remove_all_listeners(self, event=None): + """Remove all listeners attached to `event`. """ - Remove all listeners attached to `event`. - """ - self._events[event] = [] + if event is not None: + self._events[event] = [] + else: + self._events = None + self._events = defaultdict(list) def listeners(self, event): return self._events[event]
Without arguments, `remove_all_listeners()` will remove all listeners for all events This is consistent with the behavior of [the Node.js counterpart](<URL>).
py
diff --git a/xmpp_backends/ejabberdctl.py b/xmpp_backends/ejabberdctl.py index <HASH>..<HASH> 100644 --- a/xmpp_backends/ejabberdctl.py +++ b/xmpp_backends/ejabberdctl.py @@ -220,6 +220,10 @@ class EjabberdctlBackend(EjabberdBackendBase): username, domain = jid.split('@', 1) domain, resource = domain.split('/', 1) + if prio == 'nil': + prio = None + else: + prio = int(prio) started = pytz.utc.localize(datetime.utcnow() - timedelta(int(uptime))) typ, encrypted, compressed = self.parse_connection_string(conn, version) @@ -228,7 +232,7 @@ class EjabberdctlBackend(EjabberdBackendBase): username=username, domain=domain, resource=resource, - priority=int(prio), + priority=prio, ip_address=self.parse_ip_address(ip, version), uptime=started, status='', # session['status'],
priority sometimes is 'nil'
py
diff --git a/gwpy/tests/test_table.py b/gwpy/tests/test_table.py index <HASH>..<HASH> 100644 --- a/gwpy/tests/test_table.py +++ b/gwpy/tests/test_table.py @@ -123,8 +123,8 @@ class TableTests(unittest.TestCase): def test_read_write_root(self): table = self.TABLE_CLASS.read( - TEST_XML_FILE, format='ligolw.sngl_burst', - columns=['peak_time', 'peak_time_ns', 'snr', 'peak_frequency']) + TEST_XML_FILE, format='ligolw.sngl_burst', + columns=['peak_time', 'peak_time_ns', 'snr', 'peak_frequency']) tempdir = tempfile.mkdtemp() try: fp = tempfile.mktemp(suffix='.root', dir=tempdir)
tests: fixed pep8 issue [ci skip]
py
diff --git a/python/ray/tests/test_k8s_operator_examples.py b/python/ray/tests/test_k8s_operator_examples.py index <HASH>..<HASH> 100644 --- a/python/ray/tests/test_k8s_operator_examples.py +++ b/python/ray/tests/test_k8s_operator_examples.py @@ -12,7 +12,7 @@ import kubernetes import pytest import yaml -from ray.autoscaler._private.kubernetes.node_provider import\ +from ray.autoscaler._private._kubernetes.node_provider import\ KubernetesNodeProvider IMAGE_ENV = "KUBERNETES_OPERATOR_TEST_IMAGE"
[kubernetes][test][minor] Fix K8s test by an adding an underscore. (#<I>)
py
diff --git a/kernel_tuner/strategies/genetic_algorithm.py b/kernel_tuner/strategies/genetic_algorithm.py index <HASH>..<HASH> 100644 --- a/kernel_tuner/strategies/genetic_algorithm.py +++ b/kernel_tuner/strategies/genetic_algorithm.py @@ -93,11 +93,12 @@ def weighted_choice(population, n): def random_population(pop_size, tune_params): """create a random population""" population = [] - for _ in range(pop_size): + while len(population) < pop_size: dna = [] for i in range(len(tune_params)): dna.append(random_val(i, tune_params)) - population.append(dna) + if not dna in population: + population.append(dna) return population def random_val(index, tune_params):
random population to always generate unique population members
py
diff --git a/pyuploadcare/dj/models.py b/pyuploadcare/dj/models.py index <HASH>..<HASH> 100644 --- a/pyuploadcare/dj/models.py +++ b/pyuploadcare/dj/models.py @@ -66,7 +66,8 @@ pattern_of_crop = re.compile(''' | # empty string \d+:\d+| # "2:3" \d+x\d+| # "200x300" - \d+x\d+\ upscale # "200x300 upscale" + \d+x\d+\ upscale| # "200x300 upscale" + \d+x\d+\ minimum # "200x300 minimum" ) $ ''', re.VERBOSE)
allow "<I>x<I>" minimum crop
py
diff --git a/labsuite/compilers/plate_map.py b/labsuite/compilers/plate_map.py index <HASH>..<HASH> 100644 --- a/labsuite/compilers/plate_map.py +++ b/labsuite/compilers/plate_map.py @@ -169,6 +169,6 @@ class Plate(): """ Returns the well position on this plate matching a particular value. """ - for pos in self.map: + for pos in sorted(self.map.keys()): if self.map[pos].strip() == value: return humanize_position(pos)
CSV Ingestion: Always return same well in value search.
py
diff --git a/piazza_api/network.py b/piazza_api/network.py index <HASH>..<HASH> 100644 --- a/piazza_api/network.py +++ b/piazza_api/network.py @@ -260,10 +260,11 @@ class Network(object): cid = post["id"] except KeyError: cid = post + except TypeError: + cid = post params = { "cid": cid, - # For updates, the content is put into the subject. "subject": content, }
Update content_update with TypeError exception
py
diff --git a/gitenberg/travis/__init__.py b/gitenberg/travis/__init__.py index <HASH>..<HASH> 100644 --- a/gitenberg/travis/__init__.py +++ b/gitenberg/travis/__init__.py @@ -1,7 +1,9 @@ +import glob import subprocess import uuid import os + BUILD_EPUB_SCRIPT = """ #!/bin/sh @@ -30,6 +32,30 @@ function build_epub_from_asciidoc { build_epub_from_asciidoc $1 $2 """ +def source_book(repo_name): + + """ + return the path of document to use as the source for building epub + """ + + repo_id = repo_name.split("_")[-1] + repo_htm_path = "{repo_id}-h/{repo_id}-h.htm".format(repo_id=repo_id) + + possible_paths = ["book.asciidoc", + repo_htm_path, + "{}-0.txt".format(repo_id), + "{}-8.txt".format(repo_id), + "{}.txt".format(repo_id), + ] + + # return the first match + + for path in possible_paths: + if os.path.exists(path): + return path + + return None + def build_epub_from_asciidoc (version, epub_title): """
add source_book to return the path for the source to use
py
diff --git a/ev3dev/ev3.py b/ev3dev/ev3.py index <HASH>..<HASH> 100644 --- a/ev3dev/ev3.py +++ b/ev3dev/ev3.py @@ -109,7 +109,7 @@ class Leds(object): # ~autogen -class Button(object): +class Button(ButtonEVIO): """ EV3 Buttons """
Fix a bug introduced in #<I> Some debug changes creeped into PR
py
diff --git a/cpenv/mappings.py b/cpenv/mappings.py index <HASH>..<HASH> 100644 --- a/cpenv/mappings.py +++ b/cpenv/mappings.py @@ -16,6 +16,7 @@ from .vendor import yaml KeyValue = collections.namedtuple('KeyValue', 'key value') +Item = collections.namedtuple('Item', 'key value') class CaseInsensitiveDict(collections.MutableMapping): @@ -32,7 +33,7 @@ class CaseInsensitiveDict(collections.MutableMapping): return '{}({!r})'.format(self.__class__.__name__, dict(self.items())) def __setitem__(self, key, value): - self._items[key.lower()] = KeyValue(key, value) + self._items[key.lower()] = Item(key, value) def __getitem__(self, key): return self._items[key.lower()].value
change: rename KeyValue to Item
py
diff --git a/_pytest/fixtures.py b/_pytest/fixtures.py index <HASH>..<HASH> 100644 --- a/_pytest/fixtures.py +++ b/_pytest/fixtures.py @@ -4,7 +4,7 @@ import functools import inspect import sys import warnings -from collections import OrderedDict, deque, defaultdict, namedtuple +from collections import OrderedDict, deque, defaultdict import attr import py @@ -23,7 +23,11 @@ from _pytest.compat import ( ) from _pytest.outcomes import fail, TEST_OUTCOME -PseudoFixtureDef = namedtuple('PseudoFixtureDef', ('cached_result', 'scope')) + [email protected](frozen=True) +class PseudoFixtureDef(object): + cached_result = attr.ib() + scope = attr.ib() def pytest_sessionstart(session):
Use a frozen attr class for PseudoFixtureDef.
py
diff --git a/keanu-python/tests/test_traceplot.py b/keanu-python/tests/test_traceplot.py index <HASH>..<HASH> 100644 --- a/keanu-python/tests/test_traceplot.py +++ b/keanu-python/tests/test_traceplot.py @@ -1,6 +1,5 @@ from keanu.plots import traceplot from keanu.vartypes import sample_types -from keanu import Model from numpy import array from numpy.testing import assert_array_equal import pytest @@ -12,13 +11,10 @@ import matplotlib.pyplot as plt @pytest.fixture def trace() -> sample_types: - with Model() as m: - trace = { - "gamma": [array([[1., 2.], [3., 4.]]), array([[2., 3.], [4., 5.]])], - "gaussian": [array([[0.1, 0.2], [0.3, 0.4]]), array([[0.2, 0.3], [0.4, 0.5]])] - } - - return trace + return { + "gamma": [array([[1., 2.], [3., 4.]]), array([[2., 3.], [4., 5.]])], + "gaussian": [array([[0.1, 0.2], [0.3, 0.4]]), array([[0.2, 0.3], [0.4, 0.5]])] + } def test_traceplot_returns_axeplot_with_correct_data(trace: sample_types) -> None:
Remove dependencies for Model in test_traceplot. We don't need that either.
py
diff --git a/demosys/scene/camera.py b/demosys/scene/camera.py index <HASH>..<HASH> 100644 --- a/demosys/scene/camera.py +++ b/demosys/scene/camera.py @@ -1,7 +1,6 @@ +import time from math import cos, radians, sin -import glfw - from demosys.opengl import Projection from pyrr import Vector3, matrix44, vector, vector3 @@ -195,11 +194,11 @@ class SystemCamera(Camera): :return: The current view matrix for the camera """ # Use separate time in camera so we can move it when the demo is paused - time = glfw.get_time() + now = time.time() # If the camera has been inactive for a while, a large time delta # can suddenly move the camera far away from the scene - t = max(time - self._last_time, 0) - self._last_time = time + t = max(now - self._last_time, 0) + self._last_time = now # X Movement if self._xdir == POSITIVE:
Don't use glfw time in camera class
py
diff --git a/pymc3/model.py b/pymc3/model.py index <HASH>..<HASH> 100644 --- a/pymc3/model.py +++ b/pymc3/model.py @@ -349,6 +349,7 @@ class ObservedRV(Factor): self.logp_elemwiset = distribution.logp(*args) self.model = model + self.distribution = distribution def Deterministic(name, var, model=None): """Create a named deterministic variable
Add distribution to ObservedRV.
py
diff --git a/KISSmetrics/tests/test_docs.py b/KISSmetrics/tests/test_docs.py index <HASH>..<HASH> 100644 --- a/KISSmetrics/tests/test_docs.py +++ b/KISSmetrics/tests/test_docs.py @@ -9,7 +9,7 @@ import unittest class DocTestCase(unittest.TestCase): def test_docs(self): failure_count, test_count \ - = doctest.testfile('../README.md', optionflags=doctest.ELLIPSIS) + = doctest.testfile('../../README.md', optionflags=doctest.ELLIPSIS) assert failure_count == 0
correct the path to README.md
py
diff --git a/tests/unit/matchers/base_test.py b/tests/unit/matchers/base_test.py index <HASH>..<HASH> 100644 --- a/tests/unit/matchers/base_test.py +++ b/tests/unit/matchers/base_test.py @@ -33,7 +33,7 @@ def test_base_matcher_exceptions(): assert _BaseMatcher('foo').match(None) is None with pytest.raises(ValueError, - message='expectation argument cannot be empty'): + match='expectation argument cannot be empty'): _BaseMatcher(None)
fix(#<I>): use match keyword in pytest.raises
py
diff --git a/openquake/baselib/parallel.py b/openquake/baselib/parallel.py index <HASH>..<HASH> 100644 --- a/openquake/baselib/parallel.py +++ b/openquake/baselib/parallel.py @@ -877,7 +877,7 @@ class Starmap(object): if len(self.busytime) > 1: times = numpy.array(list(self.busytime.values())) logging.info('Busy time in the workers: %.1fs, std=%.1fs', - times.mean(), times.std()) + times.mean(), times.std()) def sequential_apply(task, args, concurrent_tasks=CT,
Indentation fix [skip CI]
py
diff --git a/Adyen/client.py b/Adyen/client.py index <HASH>..<HASH> 100644 --- a/Adyen/client.py +++ b/Adyen/client.py @@ -303,10 +303,17 @@ class AdyenClient(object): url = self._determine_api_url(platform, service, action) - raw_response, raw_request, status_code, headers = \ - self.http_client.request(url, json=message, username=username, - password=password, headers=headers, - **kwargs) + if xapikey: + raw_response, raw_request, status_code, headers = \ + self.http_client.request(url, json=request_data, + xapikey=xapikey, headers=headers, + **kwargs) + else: + raw_response, raw_request, status_code, headers = \ + self.http_client.request(url, json=message, username=username, + password=password, + headers=headers, + **kwargs) # Creates AdyenResponse if request was successful, raises error if not. adyen_result = self._handle_response(url, raw_response, raw_request,
[PW-<I>] Added the xapikey in http client request for call api (#<I>) * Http client request for call api includes xapi key * Formatting
py
diff --git a/fault/system_verilog_target.py b/fault/system_verilog_target.py index <HASH>..<HASH> 100644 --- a/fault/system_verilog_target.py +++ b/fault/system_verilog_target.py @@ -405,7 +405,7 @@ vcs -sverilog -full64 +v2k -timescale={self.timescale} -LDFLAGS -Wl,--no-as-need logging.debug(f"Running command: {cmd}") result = subprocess.run(cmd, cwd=self.directory, shell=True, capture_output=True) - logging.debug(result.stdout.decode()) + logging.info(result.stdout.decode()) assert not result.returncode, "Error running system verilog simulator" if self.simulator == "vcs": result = subprocess.run("./simv", cwd=self.directory, shell=True, @@ -418,7 +418,7 @@ vcs -sverilog -full64 +v2k -timescale={self.timescale} -LDFLAGS -Wl,--no-as-need # VCS and iverilog do not set the return code when a # simulation exits with an error, so we check the result # of stdout to see if "Error" is present - logging.debug(result.stdout.decode()) + logging.info(result.stdout.decode()) assert not result.returncode, \ f"Running {self.simulator} binary failed" if self.simulator == "vcs":
Use logging.info for system verilog runs
py
diff --git a/openquake/hazardlib/gsim/chao_2020.py b/openquake/hazardlib/gsim/chao_2020.py index <HASH>..<HASH> 100644 --- a/openquake/hazardlib/gsim/chao_2020.py +++ b/openquake/hazardlib/gsim/chao_2020.py @@ -248,6 +248,8 @@ class ChaoEtAl2020SSlab(ChaoEtAl2020SInter): Chao et al. (2020) for Subduction Slab. """ + DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.SUBDUCTION_INTRASLAB + CONST_FAULT = {'C4': 0.2, 'href': 35} SUFFIX = "_is" @@ -258,6 +260,8 @@ class ChaoEtAl2020Asc(ChaoEtAl2020SInter): Chao et al. (2020) for Crustal. """ + DEFINED_FOR_TECTONIC_REGION_TYPE = const.TRT.ACTIVE_SHALLOW_CRUST + # add rake to determine fault style in _ftype() REQUIRES_RUPTURE_PARAMETERS = {'mag', 'rake', 'ztor'}
Update chao_<I>.py chao subclass tectonic types
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -97,7 +97,7 @@ class FetchCommand(distutils.cmd.Command): 'bh': fetch_bh, 'iphas': fetch_iphas, 'marshall': fetch_marshall, - 'chen2014', fetch_chen2014} + 'chen2014': fetch_chen2014} def initialize_options(self): self.map_name = None
Fixed fetch code in setup.py
py
diff --git a/gears/compressors/__init__.py b/gears/compressors/__init__.py index <HASH>..<HASH> 100644 --- a/gears/compressors/__init__.py +++ b/gears/compressors/__init__.py @@ -1,2 +1,3 @@ +from .base import BaseCompressor, ExecCompressor from .cleancss import CleanCSSCompressor from .uglifyjs import UglifyJSCompressor
BaseCompressor and ExecCompressor can be imported from gears.compressors now
py
diff --git a/bika/lims/browser/batchfolder.py b/bika/lims/browser/batchfolder.py index <HASH>..<HASH> 100644 --- a/bika/lims/browser/batchfolder.py +++ b/bika/lims/browser/batchfolder.py @@ -32,7 +32,6 @@ class BatchFolderContentsView(BikaListingView): self.show_select_all_checkbox = False self.show_select_column = True self.pagesize = 25 - request.set('disable_border', 1) self.columns = { 'BatchID': {'title': _('Batch ID')}, @@ -78,6 +77,9 @@ class BatchFolderContentsView(BikaListingView): ] def __call__(self): + if self.context.absolute_url() == self.portal.batches.absolute_url(): + # in contexts other than /batches, we do want to show the edit border + request.set('disable_border', 1) if self.context.absolute_url() == self.portal.batches.absolute_url() \ and self.portal_membership.checkPermission(AddBatch, self.portal.batches): self.context_actions[_('Add')] = \
Show batch-listing edit-border outside of "/batches" context
py
diff --git a/bika/lims/content/analysisrequest.py b/bika/lims/content/analysisrequest.py index <HASH>..<HASH> 100644 --- a/bika/lims/content/analysisrequest.py +++ b/bika/lims/content/analysisrequest.py @@ -1881,8 +1881,7 @@ class AnalysisRequest(BaseFolder): """ compute default member discount if it applies """ if hasattr(self, 'getMemberDiscountApplies'): if self.getMemberDiscountApplies(): - plone = getSite() - settings = plone.bika_setup + settings = self.bika_setup return settings.getMemberDiscount() else: return "0.00"
Remove 'getSite' call from getDefaultMemberDiscount (fails during EndRequest event handler)
py
diff --git a/www/tests/brython_test_utils/unittest.py b/www/tests/brython_test_utils/unittest.py index <HASH>..<HASH> 100644 --- a/www/tests/brython_test_utils/unittest.py +++ b/www/tests/brython_test_utils/unittest.py @@ -78,12 +78,11 @@ class OneTimeTestResult(unittest.TestResult): def load_brython_test_cases(base_path=''): - return unittest.TestSuite( - NamedTestSuite('Brython : ' + label, - (BrythonModuleTestCase(filenm, caption, base_path) - for filenm, caption in options) - ) - for label, options in utils.discover_brython_test_modules() - ) - + ret = [] + for label, options in utils.discover_brython_test_modules(): + tcs = [] + for filenm, caption in options: + tcs.append(BrythonModuleTestCase(filenm, caption, base_path)) + ret.append(NamedTestSuite('Brython :' + label, tcs)) + return unittest.TestSuite(ret)
BUGFIX: Make CI tests pass again.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -82,7 +82,7 @@ setup_args = dict( long_description_content_type="text/markdown", cmdclass=cmdclass, packages=setuptools.find_packages(), - install_requires=["jupyterlab~=3.0", "notebook", "simpervisor>=0.4", "aiohttp"], + install_requires=["simpervisor>=0.4", "aiohttp"], zip_safe=False, include_package_data=True, python_requires=">=3.6",
remove jupyterlab and notebook from install_requires
py
diff --git a/kubernetes_asyncio/watch/watch_test.py b/kubernetes_asyncio/watch/watch_test.py index <HASH>..<HASH> 100644 --- a/kubernetes_asyncio/watch/watch_test.py +++ b/kubernetes_asyncio/watch/watch_test.py @@ -86,7 +86,7 @@ class WatchTest(TestCase): cnt = 0 async for _ in watch.stream(fake_api.get_namespaces): cnt += 1 - assert cnt == len(side_effects) + self.assertEqual(cnt, len(side_effects)) def test_unmarshal_with_float_object(self): w = Watch() @@ -123,8 +123,9 @@ class WatchTest(TestCase): } ret = Watch().unmarshal_event(json.dumps(k8s_err), None) - assert ret['type'] == k8s_err['type'] - assert ret['object'] == ret['raw_object'] == k8s_err['object'] + self.assertEqual(ret['type'], k8s_err['type']) + self.assertEqual(ret['object'], k8s_err['object']) + self.assertEqual(ret['object'], k8s_err['object']) async def test_watch_with_exception(self): fake_resp = CoroutineMock()
PR feedback: use `assertEqual` instead of `assert`
py
diff --git a/marshmallow_peewee/convert.py b/marshmallow_peewee/convert.py index <HASH>..<HASH> 100644 --- a/marshmallow_peewee/convert.py +++ b/marshmallow_peewee/convert.py @@ -47,11 +47,18 @@ class ModelConverter(object): return result def convert_field(self, field): + pw_fields = [x[0] for x in self.TYPE_MAPPING] + for f in pw_fields: + if isinstance(fields, f): + validate = [convert_value_validate(field.python_value)] + break + else: + validate = [] params = { 'allow_none': field.null, 'attribute': field.name, 'required': not field.null and field.default is None, - 'validate': [convert_value_validate(field.python_value)], + 'validate': validate, } if field.default is not None:
Support for custom peewee fields.
py
diff --git a/examples/app/spectrogram/main.py b/examples/app/spectrogram/main.py index <HASH>..<HASH> 100644 --- a/examples/app/spectrogram/main.py +++ b/examples/app/spectrogram/main.py @@ -69,6 +69,9 @@ freq = Slider(start=1, end=MAX_FREQ, value=MAX_FREQ, step=1, title="Frequency") gain = Slider(start=1, end=20, value=1, step=1, title="Gain") def update(): + if audio.data['values'] is None: + return + signal, spectrum, bins = audio.data['values'] # seems to be a problem with Array property, using List for now
hotfix for pyaudio None return in spectrogam
py
diff --git a/pywws/WeatherStation.py b/pywws/WeatherStation.py index <HASH>..<HASH> 100755 --- a/pywws/WeatherStation.py +++ b/pywws/WeatherStation.py @@ -176,7 +176,7 @@ def findDevice(idVendor, idProduct): if device.idVendor == idVendor and device.idProduct == idProduct: return device return None -class weather_station: +class weather_station(object): """Class that represents the weather station to user program.""" def __init__(self): """Connect to weather station and prepare to read data.""" @@ -255,7 +255,8 @@ class weather_station: old_data['delay'] = new_data['delay'] yielded = False data_changed = new_data != old_data - if ptr_changed and (new_data['delay'] == None or new_data['delay'] > 4): + if ptr_changed and (new_data['delay'] == None or + new_data['delay'] >= read_period): # picked up old data from new pointer, ignore it self.logger.info('live_data old data') pass
Fixed serious bug in 'live_log' routine when weather station is set to more than five minute logging interval.
py
diff --git a/rest_condition/permissions.py b/rest_condition/permissions.py index <HASH>..<HASH> 100644 --- a/rest_condition/permissions.py +++ b/rest_condition/permissions.py @@ -102,6 +102,8 @@ class Condition(object): if reduced_result is not _NONE: return not reduced_result if self.negated else reduced_result + return False + def has_object_permission(self, request, view, obj): return self.evaluate_permissions('has_object_permission', request, view, obj)
Return False by default in evaluate_permissions method.
py
diff --git a/passpie/cli.py b/passpie/cli.py index <HASH>..<HASH> 100644 --- a/passpie/cli.py +++ b/passpie/cli.py @@ -347,6 +347,20 @@ def reset(db, passphrase): repo.commit(message='Reset database') [email protected](help='Remove all credentials from database') [email protected]("-y", "--yes", is_flag=True, help="Skip confirmation prompt") +@pass_db +def purge(db, yes): + if db.credentials(): + if not yes: + alert = "Purge '{}' credentials".format(len(db.credentials())) + yes = click.confirm(click.style(alert, 'yellow'), abort=True) + if yes: + db.purge() + repo = Repository(db.path) + repo.commit(message='Purged database') + + @cli.command(help='Shows passpie database changes history') @click.option("--init", is_flag=True, help="Enable history tracking") @click.option("--reset-to", default=-1, help="Undo changes in database")
Add purge command to clean whole database
py
diff --git a/slacker/__init__.py b/slacker/__init__.py index <HASH>..<HASH> 100644 --- a/slacker/__init__.py +++ b/slacker/__init__.py @@ -98,6 +98,17 @@ class Groups(BaseAPI): 'count': count }) + def invite(self, channel, user): + return self.post('groups.invite', + params={'channel': channel, 'user': user}) + + def kick(self, channel, user): + return self.post('groups.kick', + params={'channel': channel, 'user': user}) + + def leave(self, channel): + return self.post('groups.leave', params={'channel': channel}) + def mark(self, channel, ts): return self.post('groups.mark', params={'channel': channel, 'ts': ts}) @@ -141,6 +152,10 @@ class Channels(BaseAPI): return self.post('channels.invite', params={'channel': channel, 'user': user}) + def kick(self, channel, user): + return self.post('channels.kick', + params={'channel': channel, 'user': user}) + def set_purpose(self, channel, purpose): return self.post('channels.setPurpose', params={'channel': channel, 'purpose': purpose})
Add groups.invite, groups.kick, groups.leave and channels.kick APIs.
py
diff --git a/GPy/models/gp_regression.py b/GPy/models/gp_regression.py index <HASH>..<HASH> 100644 --- a/GPy/models/gp_regression.py +++ b/GPy/models/gp_regression.py @@ -16,6 +16,7 @@ class GPRegression(GP): :param Y: observed values :param kernel: a GPy kernel, defaults to rbf :param Norm normalizer: [False] + :param noise_var: the noise variance for Gaussian likelhood, defaults to 1. Normalize Y with the norm given. If normalizer is False, no normalization will be done @@ -25,12 +26,12 @@ class GPRegression(GP): """ - def __init__(self, X, Y, kernel=None, Y_metadata=None, normalizer=None): + def __init__(self, X, Y, kernel=None, Y_metadata=None, normalizer=None, noise_var=1.): if kernel is None: kernel = kern.RBF(X.shape[1]) - - likelihood = likelihoods.Gaussian() + + likelihood = likelihoods.Gaussian(variance=noise_var) super(GPRegression, self).__init__(X, Y, kernel, likelihood, name='GP regression', Y_metadata=Y_metadata, normalizer=normalizer)
Allow setting the initial noise variance for GPRegression
py
diff --git a/passpie/database.py b/passpie/database.py index <HASH>..<HASH> 100644 --- a/passpie/database.py +++ b/passpie/database.py @@ -50,7 +50,7 @@ class PasspieStorage(Storage): for eid, cred in data["_default"].items(): credpath = self.make_credpath(cred["name"], cred["login"]) with mkdir_open(credpath, "w") as f: - f.write(yaml.dump(dict(cred), default_flow_style=False)) + f.write(yaml.safe_dump(dict(cred), default_flow_style=False)) class Database(TinyDB):
Update encrypted credential dump to yaml.safe_dump
py