Columns: diff — string (lengths 139 to 3.65k); message — string (lengths 8 to 627); diff_languages — string (1 class)
diff --git a/aiohttp/web_reqrep.py b/aiohttp/web_reqrep.py
index <HASH>..<HASH> 100644
--- a/aiohttp/web_reqrep.py
+++ b/aiohttp/web_reqrep.py
@@ -865,4 +865,4 @@ def json_response(data=sentinel, *, text=None, body=None, status=200,
         else:
             text = dumps(data)
     return Response(text=text, body=body, status=status, reason=reason,
-                    content_type=content_type)
+                    headers=headers, content_type=content_type)
json_response: pass on headers to response object
py
diff --git a/star.py b/star.py
index <HASH>..<HASH> 100644
--- a/star.py
+++ b/star.py
@@ -185,7 +185,6 @@ def trig_param_plot(stars, output):
                                numpy.float)
     parameters = numpy.vstack(tuple(
         interpolation.ak_bk2Ak_Phik(star.coefficients) for star in stars))
-    assert False, parameters.shape + numpy.hsplit(parameters[:,:7], 7).shape
     (A0, A1, Phi1, A2, Phi2, A3, Phi3) = numpy.hsplit(parameters[:,:7], 7)
     (R21, R31, R32) = (A2/A1, A3/A1, A3/A2)
     (Phi21, Phi31, Phi32) = (Phi2/Phi1, Phi3/Phi1, Phi3/Phi2)
Removing an assertion I forgot about
py
diff --git a/multiqc/modules/kraken/kraken.py b/multiqc/modules/kraken/kraken.py
index <HASH>..<HASH> 100644
--- a/multiqc/modules/kraken/kraken.py
+++ b/multiqc/modules/kraken/kraken.py
@@ -202,7 +202,7 @@ class MultiqcModule(BaseMultiqcModule):
         for s_name, d in self.kraken_raw_data.items():
             tdata[s_name] = {}
             for row in d:
-                percent = row["counts_rooted"] / self.kraken_sample_total_readcounts[s_name]
+                percent = (row["counts_rooted"] / self.kraken_sample_total_readcounts[s_name]) * 100.0
                 if row["rank_code"] == "U":
                     tdata[s_name]["% Unclassified"] = percent
                 if row["rank_code"] == top_rank_code and row["classif"] in top_five:
Multiply new percentage value by <I>
py
diff --git a/setuptools_scm/version.py b/setuptools_scm/version.py
index <HASH>..<HASH> 100644
--- a/setuptools_scm/version.py
+++ b/setuptools_scm/version.py
@@ -5,6 +5,8 @@ from .utils import trace

 from pkg_resources import iter_entry_points

+from distutils import log
+
 try:
     from pkg_resources import parse_version, SetuptoolsVersion
 except ImportError as e:
@@ -13,8 +15,8 @@ except ImportError as e:


 def _warn_if_setuptools_outdated():
     if parse_version is None:
-        print("your setuptools is too old (<12)")
-        print("setuptools_scm functionality is degraded")
+        log.warn("your setuptools is too old (<12)")
+        log.warn("setuptools_scm functionality is degraded")


 def callable_or_entrypoint(group, callable_or_name):
Do not use stdout for reporting wrong setuptools
py
diff --git a/django_js_reverse/tests/unit_tests.py b/django_js_reverse/tests/unit_tests.py
index <HASH>..<HASH> 100755
--- a/django_js_reverse/tests/unit_tests.py
+++ b/django_js_reverse/tests/unit_tests.py
@@ -110,12 +110,12 @@ class JSReverseStaticFileSaveTest(JSReverseViewTestCaseMinified):
         f = open(path)
         content1 = f.read()
         if hasattr(content1, 'decode'):
-            content1 = content1.decode(encoding='UTF-8')
+            content1 = content1.decode()

         r2 = self.client.get('/jsreverse/')
         content2 = r2.content
         if hasattr(content2, 'decode'):
-            content2 = content2.decode(encoding='UTF-8')
+            content2 = content2.decode()

         self.assertEqual(len(content1), len(content2), "Static file don't match http response content_1")
         self.assertEqual(content1, content2, "Static file don't match http response content_2")
reverse.js file save test fix4
py
diff --git a/polyaxon/spawner/spawner.py b/polyaxon/spawner/spawner.py
index <HASH>..<HASH> 100644
--- a/polyaxon/spawner/spawner.py
+++ b/polyaxon/spawner/spawner.py
@@ -341,6 +341,7 @@ class K8SSpawner(K8SManager):

     def stop_experiment(self):
         self.delete_experiment_config_map()
+        self.delete_experiment_secret()
         self.delete_master()
         self.delete_worker()
         self.delete_ps()
Fix stop_experiment to also delete secrets in spawner
py
diff --git a/stripe/__init__.py b/stripe/__init__.py
index <HASH>..<HASH> 100644
--- a/stripe/__init__.py
+++ b/stripe/__init__.py
@@ -556,13 +556,19 @@ class StripeObject(object):
         return json.dumps(self.to_dict(), sort_keys=True, indent=2,
                           cls=StripeObjectEncoder)

     def to_dict(self):
+        def _serialize(o):
+            if isinstance(o, StripeObject):
+                return o.to_dict()
+            if isinstance(o, list):
+                return [_serialize(i) for i in o]
+            return o
+
         d = dict()
         for k in sorted(self._values):
             if k in self._permanent_attributes:
                 continue
-            v = getattr(self, k)
-            if isinstance(v, StripeObject):
-                v = v.to_dict()
+            v = getattr(self, k)
+            v = _serialize(v)
             d[k] = v
         return d
Fix issue #<I> by adding a recursive serialize function to handle lists and other recursive types. Any object that contained StripeObjects in a list was not being properly serialized. The recursive function now takes care of this.
py
diff --git a/fix_yahoo_finance/__init__.py b/fix_yahoo_finance/__init__.py
index <HASH>..<HASH> 100644
--- a/fix_yahoo_finance/__init__.py
+++ b/fix_yahoo_finance/__init__.py
@@ -19,12 +19,13 @@
 # limitations under the License.
 #

+from __future__ import print_function
+
 __version__ = "0.1.0"
 __author__ = "Ran Aroussi"
 __all__ = ['download', 'Ticker', 'pdr_override',
            'get_yahoo_crumb', 'parse_ticker_csv']

-
 import time as _time
 import datetime as _datetime
 import requests as _requests
added __future__.print_function
py
diff --git a/sark/code/segment.py b/sark/code/segment.py
index <HASH>..<HASH> 100644
--- a/sark/code/segment.py
+++ b/sark/code/segment.py
@@ -165,7 +165,7 @@ class Segment(object):

     @property
     def name(self):
-        return idaapi.get_segm_name(self.segment_t)
+        return idaapi.get_true_segm_name(self.segment_t)

     @name.setter
     def name(self, name):
sark.Segment: change to use true name for segment.
py
diff --git a/firecloud/api.py b/firecloud/api.py
index <HASH>..<HASH> 100755
--- a/firecloud/api.py
+++ b/firecloud/api.py
@@ -218,7 +218,7 @@ def copy_entities(from_namespace, from_workspace, to_namespace,
    }
    return __post(uri, json=body,
                  params={'linkExistingEntities':
-                             str(link_existing_entities).lower()})
+                         str(link_existing_entities).lower()})

def get_entities(namespace, workspace, etype):
    """List entities of given type in a workspace.
Added link_existing_entities kwarg to copy_entities.
py
diff --git a/ipyrad/assemble/cluster_within.py b/ipyrad/assemble/cluster_within.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/cluster_within.py
+++ b/ipyrad/assemble/cluster_within.py
@@ -666,6 +666,10 @@ def persistent_popen_align3(clusts, maxseqs=200):
                 ## append aligned cluster string
                 aligned.append("\n".join(align1))

+            ## Malformed clust. Dictionary creation with only 1 element will raise.
+            except ValueError as inst:
+                LOGGER.debug("Bad PE cluster - {}\nla1 - {}\nla2 - {}".format(\
+                             lclust, la1, la2))
             ## Either reads are SE, or at least some pairs are merged.
             except IndexError:
Catch a bug in alignment that would crop up intermittently.
py
diff --git a/tilequeue/wof.py b/tilequeue/wof.py
index <HASH>..<HASH> 100644
--- a/tilequeue/wof.py
+++ b/tilequeue/wof.py
@@ -117,7 +117,9 @@ def _make_requests_session_with_retries(max_retries):
             503,  # Unavailable, temporarily
             504,  # Gateway timeout
             522   # Origin connection timed out
-        ]
+        ],
+        backoff_factor=1.0  # back off for 0s, 1s, 3s, 7s, etc... after
+                            # each successive failure. (factor*(2^N-1))
     ))

     # use retry for both HTTP and HTTPS connections.
Add back-off after failures, as supported by Retry - see <URL>
py
diff --git a/zk_shell/xcmd.py b/zk_shell/xcmd.py
index <HASH>..<HASH> 100644
--- a/zk_shell/xcmd.py
+++ b/zk_shell/xcmd.py
@@ -177,6 +177,9 @@ class XCmd(cmd.Cmd):
         if setup_readline:
             self._setup_readline(hist_file_name)

+        # build the list of regular commands
+        self._regular_commands = [name[3:] for name in self.get_names() if name[:3] == 'do_']
+
         # special commands dispatch map
         self._special_commands = {
             "!!": self.run_last_command,
@@ -203,7 +206,7 @@ class XCmd(cmd.Cmd):
     @property
     def commands(self):
         """ available commands, not including the special ones """
-        return [name[3:] for name in self.get_names() if name[:3] == 'do_']
+        return self._regular_commands

     @property
     def special_commands(self):
xcmd: store the list of regular commands for fast access
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,7 @@ setup(
     author="Steven Serrata",
     author_email='[email protected]',
     url='https://github.com/PaloAltoNetworks/pancloud',
-    packages=find_packages(include=['pancloud']),
+    packages=find_packages(include=['pancloud', 'pancloud.adapters']),
     include_package_data=True,
     install_requires=requirements,
     license="ISC license",
Add pancloud.adapters to find_packages include list
py
diff --git a/spyder/utils/ipython/spyder_kernel.py b/spyder/utils/ipython/spyder_kernel.py
index <HASH>..<HASH> 100644
--- a/spyder/utils/ipython/spyder_kernel.py
+++ b/spyder/utils/ipython/spyder_kernel.py
@@ -147,8 +147,8 @@ class SpyderKernel(IPythonKernel):
         content: dict
             The (JSONable) content of the message
         data: any
-            Any object that is serializable by cloudpickle (should be most things).
-            Will arrive as cloudpickled bytes in `.buffers[0]`.
+            Any object that is serializable by cloudpickle (should be most
+            things). Will arrive as cloudpickled bytes in `.buffers[0]`.
         """
         import sys
         if content is None:
appease overly pedantic linter
py
diff --git a/redis_hashring/__init__.py b/redis_hashring/__init__.py
index <HASH>..<HASH> 100644
--- a/redis_hashring/__init__.py
+++ b/redis_hashring/__init__.py
@@ -1,5 +1,3 @@
-from __future__ import print_function
-
 import binascii
 import collections
 import socket
Drop future import (#<I>)
py
diff --git a/voluptuous/voluptuous.py b/voluptuous/voluptuous.py
index <HASH>..<HASH> 100644
--- a/voluptuous/voluptuous.py
+++ b/voluptuous/voluptuous.py
@@ -104,7 +104,7 @@ else:


 __author__ = 'Alec Thomas <[email protected]>'
-__version__ = '0.7.1'
+__version__ = '0.7.2'


 @contextmanager
Bump to version <I>.
py
diff --git a/abydos/distance/_token_distance.py b/abydos/distance/_token_distance.py
index <HASH>..<HASH> 100644
--- a/abydos/distance/_token_distance.py
+++ b/abydos/distance/_token_distance.py
@@ -316,7 +316,7 @@ class _TokenDistance(_Distance):
             self._src_tokens = (
                 self.params['tokenizer'].tokenize(src).get_counter()
             )
-        if isinstance(src, Counter):
+        if isinstance(tar, Counter):
             self._tar_tokens = tar
         else:
             self._tar_tokens = (
corrected src to tar
py
diff --git a/telethon_generator/parser/source_builder.py b/telethon_generator/parser/source_builder.py
index <HASH>..<HASH> 100755
--- a/telethon_generator/parser/source_builder.py
+++ b/telethon_generator/parser/source_builder.py
@@ -53,5 +53,4 @@ class SourceBuilder:
         return self

     def __exit__(self, exc_type, exc_val, exc_tb):
-        self.out_stream.flush()
         self.out_stream.close()
SourceBuilder: Don't perform stream flushing twice

stream.close() always performs flush by itself
py
diff --git a/couchbase/tests/cases/view_iterator_t.py b/couchbase/tests/cases/view_iterator_t.py
index <HASH>..<HASH> 100644
--- a/couchbase/tests/cases/view_iterator_t.py
+++ b/couchbase/tests/cases/view_iterator_t.py
@@ -23,6 +23,7 @@ from couchbase.views.params import Query, UNSPEC
 from couchbase.exceptions import CouchbaseError
 from couchbase.result import Result
 from couchbase.exceptions import ArgumentError, CouchbaseError, HTTPError
+from couchbase._pyport import xrange


 # We'll be using the beer-sample database as it has a sufficiently large
view_iterator_t: use xrange from _pyport

Py3 doesn't have xrange

Change-Id: I<I>a1e<I>b<I>a<I>b<I>f0e1ac3ce9a2ca
Reviewed-on: <URL>
py
diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py
index <HASH>..<HASH> 100644
--- a/salt/netapi/rest_cherrypy/app.py
+++ b/salt/netapi/rest_cherrypy/app.py
@@ -14,7 +14,8 @@ A REST API for Salt
     <acl-eauth>` system which requires additional configuration not described
     here.

-    Example production-ready configuration; add to the Salt master config file:
+    Example production-ready configuration; add to the Salt master config file
+    and restart the ``salt-master`` and ``salt-api`` daemons:

     .. code-block:: yaml
Add note to rest_cherrypy instructions: restarting daemons is required

Closes #<I>
py
diff --git a/test/integration/test_build_execution.py b/test/integration/test_build_execution.py
index <HASH>..<HASH> 100644
--- a/test/integration/test_build_execution.py
+++ b/test/integration/test_build_execution.py
@@ -103,7 +103,7 @@ def new_set(request):

     return set

-
[email protected]()
 def test_run_single_build(new_config):
     """ Run a single build configuration defined by the 'new_config' method and verify the build output """
@@ -122,6 +122,7 @@ def test_run_single_build(new_config):
     build_record = records_api.get_specific(triggered_build.id).content
     build_record_checks(build_record)

[email protected]()
 def test_run_group_build(request, new_set, new_environment, new_project):
     assert (new_set is not None, 'Unable to create Build Configuration Group')
@@ -168,6 +169,7 @@ def build_record_checks(build_record):
     build_record_artifact_checks(build_record.id)

+
 def build_record_artifact_checks(build_record_id):
     ''' Check the the artifacts exist in the repository and have valid checksums'''
     artifacts = records_api.get_built_artifacts(build_record_id).content
skip build_execution tests since builds are not completing in a reasonable time on PNC
py
diff --git a/invocations/docs.py b/invocations/docs.py
index <HASH>..<HASH> 100644
--- a/invocations/docs.py
+++ b/invocations/docs.py
@@ -3,12 +3,14 @@ import os

 from invoke import ctask as task, Collection


-@task(aliases=['c'])
+# Underscored func name to avoid shadowing kwargs in build()
+@task(name='clean')
 def _clean(ctx):
     ctx.run("rm -rf {0}".format(ctx['sphinx.target']))


-@task(aliases=['b'])
+# Ditto
+@task(name='browse')
 def _browse(ctx):
     index = os.path.join(ctx['sphinx.target'], 'index.html')
     ctx.run("open {0}".format(index))
@@ -25,7 +27,7 @@ def build(ctx, clean=False, browse=False):
     if browse:
         _browse(ctx)

-ns = Collection(clean=_clean, browse=_browse, build=build)
+ns = Collection(_clean, _browse, build)
 ns.configure({
     'sphinx.source': 'docs',
     # TODO: allow lazy eval so one attr can refer to another?
Tweak docs module naming/organization:

* Do away with single-letter aliases - think this is superseded by the flags in the main task now.
* Perform 'real' naming at task level, not collection level, now that this is possible.
py
diff --git a/whereami/predict.py b/whereami/predict.py
index <HASH>..<HASH> 100644
--- a/whereami/predict.py
+++ b/whereami/predict.py
@@ -1,3 +1,4 @@
+import json
 from collections import Counter

 from access_points import get_scanner
@@ -12,7 +13,7 @@ from whereami.compat import cross_val_score
 def predict_proba(input_path=None, model_path=None, device=""):
     lp = get_model(model_path)
     data_sample = sample(device) if input_path is None else get_external_sample(input_path)
-    print(dict(zip(lp.classes_, lp.predict_proba(data_sample)[0])))
+    print(json.dumps(dict(zip(lp.classes_, lp.predict_proba(data_sample)[0]))))


 def predict(input_path=None, model_path=None, device=""):
printing predict_proba as json
py
diff --git a/glooey/drawing/artists.py b/glooey/drawing/artists.py
index <HASH>..<HASH> 100644
--- a/glooey/drawing/artists.py
+++ b/glooey/drawing/artists.py
@@ -53,8 +53,9 @@ class Artist(HoldUpdatesMixin):
         return self._vertex_list

     def hide(self):
-        self._vertex_list.delete()
-        self._vertex_list = None
+        if self._vertex_list:
+            self._vertex_list.delete()
+            self._vertex_list = None

     def show(self):
         if not self._vertex_list:
Don't try to delete vertex lists that don't exist.
py
diff --git a/pyinfra_cli/main.py b/pyinfra_cli/main.py
index <HASH>..<HASH> 100644
--- a/pyinfra_cli/main.py
+++ b/pyinfra_cli/main.py
@@ -481,17 +481,6 @@ def _main(
         print('--> Proposed changes:')
         print_meta(state, inventory)

-    # Show warning if we detected any imbalanced operations
-    if state.has_imbalanced_operations:
-        logger.warning('''
-Imbalanced operations were detected!
-
-The deploy files are executed once per host; the operations need to share
-the same arguments otherwise pyinfra cannot run them in a consistent order.
-
-Please see: http://pyinfra.readthedocs.io/page/using_python.html.
-        '''.rstrip())
-
     # If --debug-facts or --debug-operations, print and exit
     if debug_facts or debug_operations:
         if debug_facts:
@@ -508,12 +497,6 @@ Please see: http://pyinfra.readthedocs.io/page/using_python.html.

     print()

-    # Confirm operation run if imbalanced
-    if state.has_imbalanced_operations and not click.confirm(
-        'Run ops with inbalanced operations?', default=False,
-    ):
-        _exit()
-
     # Run the before_deploy hook if provided
     run_hook(state, 'before_deploy', hook_data)
Remove defunct imbalanced operation warnings :)
py
diff --git a/python/l20n/format/lol/parser.py b/python/l20n/format/lol/parser.py
index <HASH>..<HASH> 100644
--- a/python/l20n/format/lol/parser.py
+++ b/python/l20n/format/lol/parser.py
@@ -222,7 +222,9 @@ class Parser():
         if self.content[:2] == '{{':
             self.content = self.content[2:]
             if buffer:
-                obj.append(ast.String(buffer))
+                string = ast.String(buffer)
+                string._template = "%(content)s"
+                obj.append(string)
                 buffer = ''
             ws_pre_exp = self.get_ws()
             expr = self.get_expression()
update complexstring parsing to get the first string templated
py
diff --git a/visidata/vdtui.py b/visidata/vdtui.py
index <HASH>..<HASH> 100755
--- a/visidata/vdtui.py
+++ b/visidata/vdtui.py
@@ -81,16 +81,17 @@ class SettingsMgr(collections.OrderedDict):

     def get(self, k, obj=None):
         'Return self[k] considering context of obj. If obj is None, traverses the entire stack.'
-        mappings = ['override']
         if obj is None and vd:
             obj = vd.sheet

         if obj:
-            mappings.append(obj)
+            mappings = [obj]
             mro = inspect.getmro(type(obj))
             mappings.extend(mro)
+        else:
+            mappings = []

-        mappings += ['default']
+        mappings += ['override', 'default']

         for o in mappings:
             if (k, o) in self:
[options] reorder override to after sheet-specified
py
diff --git a/docs/bigquery/snippets.py b/docs/bigquery/snippets.py
index <HASH>..<HASH> 100644
--- a/docs/bigquery/snippets.py
+++ b/docs/bigquery/snippets.py
@@ -168,7 +168,8 @@ def test_list_datasets_by_label(client, to_delete):
     else:
         print('No datasets found with this filter.')
     # [END bigquery_list_datasets_by_label]
-    assert len(datasets) == 1
+    found = set([dataset.dataset_id for dataset in datasets])
+    assert dataset_id in found


 def test_create_dataset(client, to_delete):
Harden 'test_list_datasets_by_label' against overlapping CI runs. (#<I>)

Closes #<I>.
py
diff --git a/src/livestreamer/cli.py b/src/livestreamer/cli.py
index <HASH>..<HASH> 100644
--- a/src/livestreamer/cli.py
+++ b/src/livestreamer/cli.py
@@ -167,10 +167,10 @@ def write_stream(fd, out, progress, player):
         except IOError as err:
             if player and err.errno == errno.EPIPE:
                 logger.info("Player closed")
-                break
             else:
                 logger.error("Error when writing to output: {0}", str(err))
-                break
+
+            break

         written += len(data)
cli.write_stream: No need for separate breaks on IOError.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@ from setuptools import setup, find_packages

 VERSION = '0.1.1'

-with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
+with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
     README = readme.read()

 # allow setup.py to be run from any path
Transferred README.md to README.rst
py
diff --git a/tcconfig/_tc_command_helper.py b/tcconfig/_tc_command_helper.py
index <HASH>..<HASH> 100644
--- a/tcconfig/_tc_command_helper.py
+++ b/tcconfig/_tc_command_helper.py
@@ -18,11 +18,11 @@ from ._logger import logger


 def check_tc_command_installation():
-    try:
-        spr.Which("tc").verify()
-    except spr.CommandNotFoundError as e:
-        logger.error("{:s}: {}".format(e.__class__.__name__, e))
-        sys.exit(errno.ENOENT)
+    if find_bin_path("tc"):
+        return
+
+    logger.error("command not found: tc")
+    sys.exit(errno.ENOENT)


 def get_tc_base_command(tc_subcommand):
Change command installation check process

To properly check even if the user is not root.
py
diff --git a/sos/policies/__init__.py b/sos/policies/__init__.py
index <HASH>..<HASH> 100644
--- a/sos/policies/__init__.py
+++ b/sos/policies/__init__.py
@@ -768,10 +768,11 @@ any third party.

         for preset_path in os.listdir(presets_path):
             preset_path = os.path.join(presets_path, preset_path)
-            try:
-                preset_data = json.load(open(preset_path))
-            except ValueError:
-                continue
+            with open(preset_path) as pf:
+                try:
+                    preset_data = json.load(pf)
+                except ValueError:
+                    continue

             for preset in preset_data.keys():
                 pd = PresetDefaults(preset, opts=SoSOptions())
[Policy] Wrap json.load() in with clause

Wraps `json.load()` from preset loading in a `with` statement to ensure that file objects are properly closed.

Closes: #<I>
py
diff --git a/htmresearch/frameworks/layers/simple_object_machine.py b/htmresearch/frameworks/layers/simple_object_machine.py
index <HASH>..<HASH> 100644
--- a/htmresearch/frameworks/layers/simple_object_machine.py
+++ b/htmresearch/frameworks/layers/simple_object_machine.py
@@ -256,7 +256,7 @@ class SimpleObjectMachine(ObjectMachineBase):
     newBits = []
     for bit in pattern:
       if random.random() < noiseLevel:
-        newBits.append(random.randint(0, max(pattern)))
+        newBits.append(random.randint(0, self.externalInputSize))
       else:
         newBits.append(bit)
Bugfix: Allow noise to flip any bit
py
diff --git a/test/test_discovery_v1.py b/test/test_discovery_v1.py
index <HASH>..<HASH> 100644
--- a/test/test_discovery_v1.py
+++ b/test/test_discovery_v1.py
@@ -94,7 +94,7 @@ def test_create_environment():
             discovery.create_environment(name=badname)
         except ValueError as ve:
             thrown = True
-            errorstr_first = "description must be a string having length between"
+            errorstr_first = "name must be a string having length between"
             errorstr = "{0} 0 and 255 characters".format(errorstr_first)
             assert str(ve) == errorstr
fix test that got broken in the reformatting
py
diff --git a/termenu.py b/termenu.py
index <HASH>..<HASH> 100644
--- a/termenu.py
+++ b/termenu.py
@@ -260,6 +260,8 @@ class MultiSelectMenu(SearchMenu):
             self.selectedItems.remove(option)
         else:
             self.selectedItems.add(option)
+            self.selected += 1
+            self._adjust_selected()

 def show_menu(title, options, default=None, height=None, multiSelect=False):
     if multiSelect:
move down when multi-selecting
py
diff --git a/image-bundle/setup.py b/image-bundle/setup.py
index <HASH>..<HASH> 100755
--- a/image-bundle/setup.py
+++ b/image-bundle/setup.py
@@ -41,7 +41,7 @@ setup(
     long_description=Read('README.md'),
     zip_safe=False,
     classifiers=[
-        'Development Status :: 4 - Beta',
+        'Development Status :: 5 - Production/Stable',
         'Environment :: Console',
         'Intended Audience :: Developers',
         'Intended Audience :: System Administrators',
Set setup.py development status to Stable for Image Bundle.
py
diff --git a/salt/cli/caller.py b/salt/cli/caller.py
index <HASH>..<HASH> 100644
--- a/salt/cli/caller.py
+++ b/salt/cli/caller.py
@@ -71,7 +71,7 @@ class Caller(object):
         except (TypeError, CommandExecutionError) as exc:
             msg = 'Error running \'{0}\': {1}\n'
             active_level = LOG_LEVELS.get(
-                self.opts['log_level'].lower, logging.ERROR)
+                self.opts['log_level'].lower(), logging.ERROR)
             if active_level <= logging.DEBUG:
                 sys.stderr.write(traceback.format_exc())
                 sys.stderr.write(msg.format(fun, str(exc)))
Fix a bug of getting an incorrect active log level.
py
diff --git a/python/pyspark/mllib/_common.py b/python/pyspark/mllib/_common.py
index <HASH>..<HASH> 100644
--- a/python/pyspark/mllib/_common.py
+++ b/python/pyspark/mllib/_common.py
@@ -454,7 +454,7 @@ def _squared_distance(v1, v2):
     v2 = _convert_vector(v2)
     if type(v1) == ndarray and type(v2) == ndarray:
         diff = v1 - v2
-        return diff.dot(diff)
+        return numpy.dot(diff, diff)
     elif type(v1) == ndarray:
         return v2.squared_distance(v1)
     else:
@@ -469,10 +469,12 @@ def _dot(vec, target):
     calling numpy.dot of the two vectors, but for SciPy ones, we have to
     transpose them because they're column vectors.
     """
-    if type(vec) == ndarray or type(vec) == SparseVector:
+    if type(vec) == ndarray:
+        return numpy.dot(vec, target)
+    elif type(vec) == SparseVector:
         return vec.dot(target)
     elif type(vec) == list:
-        return _convert_vector(vec).dot(target)
+        return numpy.dot(_convert_vector(vec), target)
     else:
         return vec.transpose().dot(target)[0]
[SPARK-<I>][MLLIB] use numpy.dot instead of ndarray.dot

`ndarray.dot` is not available in numpy <I>. This PR makes pyspark/mllib compatible with numpy <I>.
py
diff --git a/source/rafcon/mvc/mygaphas/items/state.py b/source/rafcon/mvc/mygaphas/items/state.py
index <HASH>..<HASH> 100644
--- a/source/rafcon/mvc/mygaphas/items/state.py
+++ b/source/rafcon/mvc/mygaphas/items/state.py
@@ -162,11 +162,13 @@ class StateView(Element):

     def remove(self):
         """Remove recursively all children and then the StateView itself """
+        self.canvas.get_first_view().unselect_item(self)
         children = self.canvas.get_children(self)[:]
         for child in children:
             if isinstance(child, StateView):
                 child.remove()
             if isinstance(child, NameView):
+                self.canvas.get_first_view().unselect_item(child)
                 self.canvas.remove(child)
         self.remove_keep_rect_within_constraint_from_parent()
         for constraint in self._constraints:
Gaphas: Remove deleted item from view selection
py
diff --git a/lib/pyfrc/sim/ui.py b/lib/pyfrc/sim/ui.py
index <HASH>..<HASH> 100644
--- a/lib/pyfrc/sim/ui.py
+++ b/lib/pyfrc/sim/ui.py
@@ -353,7 +353,10 @@ class SimUI(object):

         gamedata = tk.LabelFrame(ctrl_frame, text='Game Data')
         self.gamedataval = tk.StringVar()
-        self.gamedataval.trace_add('write', self.on_gamedata_selected)
+        if hasattr(self.gamedataval, 'trace_add'):
+            self.gamedataval.trace_add('write', self.on_gamedata_selected)
+        else:
+            self.gamedataval.trace_variable('w', self.on_gamedata_selected)

         self.gamedatabox = Combobox(gamedata, textvariable=self.gamedataval)
         self.gamedatabox['width'] = 12
trace_add is a new tk API; use deprecated trace_variable when not present. Fixes #<I>
py
diff --git a/flask_security/utils.py b/flask_security/utils.py
index <HASH>..<HASH> 100644
--- a/flask_security/utils.py
+++ b/flask_security/utils.py
@@ -17,8 +17,7 @@ import warnings
 from contextlib import contextmanager
 from datetime import timedelta

-from flask import current_app, flash, render_template, request, session, \
-    url_for
+from flask import current_app, flash, request, session, url_for
 from flask_login import login_user as _login_user
 from flask_login import logout_user as _logout_user
 from flask_mail import Message
@@ -389,9 +388,9 @@ def send_mail(subject, recipient, template, **context):
     ctx = ('security/email', template)

     if config_value('EMAIL_PLAINTEXT'):
-        msg.body = render_template('%s/%s.txt' % ctx, **context)
+        msg.body = _security.render_template('%s/%s.txt' % ctx, **context)
     if config_value('EMAIL_HTML'):
-        msg.html = render_template('%s/%s.html' % ctx, **context)
+        msg.html = _security.render_template('%s/%s.html' % ctx, **context)

     if _security._send_mail_task:
         _security._send_mail_task(msg)
Use Security.render_template in mails too

Allows a custom `render_template` function, mostly for theming support in mails.
py
diff --git a/examples/amqp_service.py b/examples/amqp_service.py
index <HASH>..<HASH> 100644
--- a/examples/amqp_service.py
+++ b/examples/amqp_service.py
@@ -23,6 +23,7 @@ class ExampleAmqpService(object):

     @amqp('example.route1')
     async def route1a(self, data: Any) -> None:
+        banana = True
         self.logger.info('Received data (function: route1a) - "{}"'.format(data))

     @amqp('example.route1')
Fixes param issues with AMQP transport
py
diff --git a/molo/core/content_import/api/urls.py b/molo/core/content_import/api/urls.py
index <HASH>..<HASH> 100644
--- a/molo/core/content_import/api/urls.py
+++ b/molo/core/content_import/api/urls.py
@@ -2,6 +2,8 @@ from django.conf.urls import url

 from molo.core.content_import.api import admin_views

+
 urlpatterns = [
-    url(r'^test_link/$', admin_views.ImportView.as_view(), name='test-api-import-view'),
+    url(r"^import-articles/$", admin_views.ArticleImportView.as_view(), name="article-import"),
+    url(r"^parent/$", admin_views.ChooseParentView.as_view(model_admin=admin_views.ArticleModelAdmin()), name="test-parent"),
 ]
\ No newline at end of file
Add URLs for page parent chooser
py
diff --git a/instabot/bot/bot_get.py b/instabot/bot/bot_get.py
index <HASH>..<HASH> 100644
--- a/instabot/bot/bot_get.py
+++ b/instabot/bot/bot_get.py
@@ -172,19 +172,15 @@ def get_media_likers(self, media_id):


 def get_media_comments(self, media_id, only_text=False):
-    self.getMediaComments(media_id)
-    if 'comments' not in self.LastJson:
-        return []
+    comments = self.getMediaComments(media_id)
     if only_text:
-        return [str(item["text"]) for item in self.LastJson['comments']]
-    return self.LastJson['comments']
+        return [str(item["text"]) for item in comments]
+    return comments


 def get_media_commenters(self, media_id):
-    self.getMediaComments(media_id)
-    if 'comments' not in self.LastJson:
-        return []
-    return [str(item["user"]["pk"]) for item in self.LastJson['comments']]
+    comments = self.getMediaComments(media_id)
+    return [str(item["user"]["pk"]) for item in comments]


 def get_comment(self):
Update get_media_comments and get_media_commenters to get all comments of a post.
py
diff --git a/wpull/http/web.py b/wpull/http/web.py
index <HASH>..<HASH> 100644
--- a/wpull/http/web.py
+++ b/wpull/http/web.py
@@ -94,7 +94,8 @@ class WebSession(object):
         self._redirect_tracker = web_client.redirect_tracker_factory()
         self._loop_type = LoopType.normal

-        self._add_cookies(self._next_request)
+        if self._web_client.cookie_jar:
+            self._add_cookies(self._next_request)

     @property
     def redirect_tracker(self):
@@ -162,7 +163,7 @@ class WebSession(object):
         if self._web_client.cookie_jar:
             self._extract_cookies(response)

-        if self._next_request:
+        if self._web_client.cookie_jar and self._next_request:
             self._add_cookies(self._next_request)

     def _process_redirect(self):
http.web: Check for None before adding cookies.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,10 +19,7 @@ setup(
     long_description='A facade for wrapping API methods for logging i/o',
     test_suite='test.caliendo_test',
     install_requires=[
-        'MySQL-python==1.2.4c1',
-        'paramiko==1.9.0',
-        'pycrypto==2.6',
+        'MySQL-python>=1.2.3',
         'pysqlite==2.6.3',
-        'wsgiref==0.1.2'
     ],
 )
Removed erroneous requirements
py
diff --git a/pfr/finders/GamePlayFinder.py b/pfr/finders/GamePlayFinder.py
index <HASH>..<HASH> 100644
--- a/pfr/finders/GamePlayFinder.py
+++ b/pfr/finders/GamePlayFinder.py
@@ -63,7 +63,12 @@ def GamePlayFinder(**kwargs):
     # except Exception as e:
     #     # if parsing goes wrong, return empty DataFrame
     #     raise e
-    #     return pd.DataFrame()
+    #     return pd.DataFrame(columns=cols)
+
+    plays['Year'] = plays.Date.str[:4].astype(int)
+    plays['Month'] = plays.Date.str[4:6].astype(int)
+    plays['Date'] = plays.Date.str[6:8].astype(int)
+    plays = plays.rename({'Date': 'Boxscore'})

     return plays
added year, month, day to GPF results
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -3,10 +3,10 @@
 """Setup module for the duallog package

 This module configures setuptools so that it can create a distribution for the
-duallog package.
+package.
 """

-# Import required system libraries.
+# Import required standard libraries.
 import io
 import os
 import setuptools
@@ -18,7 +18,7 @@ with io.open(os.path.join(maindir, 'README.md'), encoding='utf-8') as file:

 # Configure setuptools.
 setuptools.setup(name='duallog',
-    version='0.13',
+    version='0.14',
     description='Parallel logging to console and logfile',
     long_description=readme,
     long_description_content_type='text/markdown',
Cosmetic changes to setup.py.
py
diff --git a/qtpy/QtCore.py b/qtpy/QtCore.py
index <HASH>..<HASH> 100644
--- a/qtpy/QtCore.py
+++ b/qtpy/QtCore.py
@@ -71,6 +71,7 @@ elif PYSIDE6:
     if getattr(Qt, 'mightBeRichText', None) is None:
         from PySide6.QtGui import Qt as guiQt
         Qt.mightBeRichText = guiQt.mightBeRichText
+        del guiQt

     # obsolete in qt6
     Qt.BackgroundColorRole = Qt.BackgroundRole
@@ -99,6 +100,7 @@ elif PYSIDE2:
     try:
         from PySide2.QtGui import Qt as guiQt
         Qt.mightBeRichText = guiQt.mightBeRichText
+        del guiQt
     except ImportError:
         # Fails with PySide2 5.12.0
         pass
QtCore: Remove guiQt after utility functions assignment
py
diff --git a/pmag.py b/pmag.py
index <HASH>..<HASH> 100755
--- a/pmag.py
+++ b/pmag.py
@@ -8452,6 +8452,8 @@ def read_criteria_from_file(path,acceptance_criteria):
                 acceptance_criteria[crit]['value']=rec[crit]
                 acceptance_criteria[crit]['threshold_type']="inherited"
                 acceptance_criteria[crit]['decimal_points']=-999
+                # LJ add:
+                acceptance_criteria[crit]['category'] = None

             # bollean flag
             elif acceptance_criteria[crit]['threshold_type']=='bool':
(temporary?) fix for measurement_step_max key error in thellier_gui auto interpreter with older data sets
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -18,6 +18,7 @@ setup(name='gtr',
     install_requires=[
         'requests==2.9.1',
     ],
+    download_url = 'https://github.com/nesta/gtr/tarball/0.1'
     classifiers=[
         "Operating System :: OS Independent",
         "Programming Language :: Python",
Add download url to setup.py
py
diff --git a/indra/tools/live_curation.py b/indra/tools/live_curation.py
index <HASH>..<HASH> 100644
--- a/indra/tools/live_curation.py
+++ b/indra/tools/live_curation.py
@@ -68,10 +68,18 @@ class Corpus(object):
             self._s3.put_object(
                 Body=json.dumps(self.curations),
                 Bucket=bucket, Key=key_base + 'curations.json')
-
-        # FixMe Use appropriate S3 exception and make the logger less verbose
+            keys = tuple(s + '.json' for s in ['raw_statements',
+                                               'statements',
+                                               'curations'])
+            logger.info('Corpus uploaded as %s, %s and %s at %s.' %
+                        (*keys, key_base))
+            return keys
+
+        # Todo
+        # Add metadata option?
         except Exception as e:
             logger.exception('Failed to put on s3: %s' % e)
+            return None

     def s3_get(self, name, bucket='world-modelers'):
         key_base = 'indra_models/' + name + '/'
Return S3 keys to uploaded statements. Update ToDo.
py
diff --git a/holoviews/ipython/widgets.py b/holoviews/ipython/widgets.py
index <HASH>..<HASH> 100644
--- a/holoviews/ipython/widgets.py
+++ b/holoviews/ipython/widgets.py
@@ -261,7 +261,7 @@ class NdWidget(param.Parameterized):
         initialize the plots.
         """
         if isinstance(view, (GridLayout, AdjointLayout)):
-            shape = grid.shape if isinstance(grid, GridLayout) else (1,1)
+            shape = view.shape if isinstance(view, GridLayout) else (1, 1)
             grid_size = (shape[1]*get_plot_size()[1],
                          shape[0]*get_plot_size()[0])
             self.plot = GridLayoutPlot(view, **dict(size=grid_size))
Fixed bug in IPython based widget
py
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index <HASH>..<HASH> 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -234,10 +234,10 @@ def get_spark_ami(opts):
         "cg1.4xlarge": "hvm",
         "hs1.8xlarge": "pvm",
         "hi1.4xlarge": "pvm",
-        "m3.medium": "pvm",
-        "m3.large": "pvm",
-        "m3.xlarge": "pvm",
-        "m3.2xlarge": "pvm",
+        "m3.medium": "hvm",
+        "m3.large": "hvm",
+        "m3.xlarge": "hvm",
+        "m3.2xlarge": "hvm",
         "cr1.8xlarge": "hvm",
         "i2.xlarge": "hvm",
         "i2.2xlarge": "hvm",
SPARK-<I>: [EC2] Switch back to HVM instances for m3.X.

During regression tests of Spark <I> we discovered perf issues with PVM instances when running PySpark. This reverts a change added in #<I> which changed the default type for m3 instances to PVM.
py
diff --git a/discord/client.py b/discord/client.py
index <HASH>..<HASH> 100644
--- a/discord/client.py
+++ b/discord/client.py
@@ -537,7 +537,8 @@ class Client:

         _log.info('logging in using static token')

-        await self._async_setup_hook()
+        if self.loop is _loop:
+            await self._async_setup_hook()

         data = await self.http.static_login(token.strip())
         self._connection.user = ClientUser(state=self._connection, data=data)
Only call async setup if the loop sentinel hasn't been changed
py
diff --git a/datapackage_pipelines/web/server.py b/datapackage_pipelines/web/server.py
index <HASH>..<HASH> 100644
--- a/datapackage_pipelines/web/server.py
+++ b/datapackage_pipelines/web/server.py
@@ -11,7 +11,8 @@ import yaml
 import mistune
 import requests

-from flask import Blueprint, Flask, render_template, abort, send_file
+from flask import \
+    Blueprint, Flask, render_template, abort, send_file, make_response
 from flask_cors import CORS
 from flask_jsonpify import jsonify
 from flask_basicauth import BasicAuth
@@ -255,7 +256,11 @@ def _make_badge_response(subject, text, colour):
     r = requests.get(image_url)
     buffer_image = BytesIO(r.content)
     buffer_image.seek(0)
-    return send_file(buffer_image, mimetype='image/svg+xml')
+    res = make_response(send_file(buffer_image, mimetype='image/svg+xml'))
+    res.headers['Cache-Control'] = \
+        'max-age=0, no-cache, no-store, must-revalidate'
+    res.headers['Expires'] = '0'
+    return res


 @blueprint.route("badge/<path:pipeline_id>")
Add caching headers to badge response. (#<I>)

Don't want these badges cached, so the most recent status is served.
py
diff --git a/tests/test_tcp.py b/tests/test_tcp.py
index <HASH>..<HASH> 100644
--- a/tests/test_tcp.py
+++ b/tests/test_tcp.py
@@ -2,7 +2,6 @@

 import os
 import socket
-import sys
 import unittest

 from common import linesep, platform_skip, TestCase
@@ -11,6 +10,7 @@ import pyuv

 TEST_PORT = 1234

+
 class TCPErrorTest(TestCase):

     def on_client_connect_error(self, client, error):
@@ -35,7 +35,6 @@ class TCPErrorTest(TestCase):
         client.open(sock.fileno())
         client.connect(("127.0.0.1", TEST_PORT), self.on_client_connect_error)
         self.loop.run()
-        sock.close()

     def test_raise(self):
         tcp = pyuv.TCP(self.loop)
tests: fix TCP test (socket is already closed)
py
diff --git a/src/python/grpcio_tests/tests/unit/_logging_test.py b/src/python/grpcio_tests/tests/unit/_logging_test.py
index <HASH>..<HASH> 100644
--- a/src/python/grpcio_tests/tests/unit/_logging_test.py
+++ b/src/python/grpcio_tests/tests/unit/_logging_test.py
@@ -15,15 +15,28 @@

 import unittest

 import six
-import grpc
+from six.moves import reload_module

 import logging
-
+import grpc
+import functools
+import sys

 class LoggingTest(unittest.TestCase):

     def test_logger_not_occupied(self):
         self.assertEqual(0, len(logging.getLogger().handlers))

+    def test_handler_found(self):
+        old_stderr = sys.stderr
+        sys.stderr = six.StringIO()
+        try:
+            reload_module(logging)
+            logging.basicConfig()
+            reload_module(grpc)
+            self.assertFalse("No handlers could be found" in sys.stderr.getvalue())
+        finally:
+            sys.stderr = old_stderr
+            reload_module(logging)

 if __name__ == '__main__':
     unittest.main(verbosity=2)
Add test for 'No handlers could be found' problem
py
diff --git a/pyrogram/client/types/bots/inline_keyboard_button.py b/pyrogram/client/types/bots/inline_keyboard_button.py
index <HASH>..<HASH> 100644
--- a/pyrogram/client/types/bots/inline_keyboard_button.py
+++ b/pyrogram/client/types/bots/inline_keyboard_button.py
@@ -31,7 +31,7 @@ class InlineKeyboardButton(Object):
         text (``str``):
             Label text on the button.

-        callback_data (``str``, *optional*):
+        callback_data (``bytes``, *optional*):
            Data to be sent in a callback query to the bot when button is pressed, 1-64 bytes.

        url (``str``, *optional*):
@@ -85,7 +85,7 @@ class InlineKeyboardButton(Object):
        if isinstance(b, KeyboardButtonCallback):
            return InlineKeyboardButton(
                text=b.text,
-                callback_data=b.data.decode()
+                callback_data=b.data
            )

        if isinstance(b, KeyboardButtonSwitchInline):
Don't decode inline buttons' callback data

Clients are able to set any value as plain bytes; this means UTF-8 decoding could fail.
py
diff --git a/pysat/tests/test_sw.py b/pysat/tests/test_sw.py
index <HASH>..<HASH> 100644
--- a/pysat/tests/test_sw.py
+++ b/pysat/tests/test_sw.py
@@ -264,6 +264,19 @@ class TestSwKpCombine():

         del combo_in

+    def test_combine_kp_no_data(self):
+        """Test combine_kp when no data is present for specified times"""
+
+        combo_in = {kk: self.combine[kk] for kk in
+                    ['standard_inst', 'recent_inst', 'forecast_inst']}
+        combo_in['start'] = pysat.datetime(2014, 2, 19)
+        combo_in['stop'] = pysat.datetime(2014, 2, 24)
+        kp_inst = sw_meth.combine_kp(**combo_in)
+
+        assert kp_inst.data.isnull().all()["Kp"]
+
+        del combo_in
+
     def test_combine_kp_inst_time(self):
         """Test combine_kp when times are provided through the instruments"""
TST: Add test for combine_kp with no data
py
diff --git a/moto/secretsmanager/models.py b/moto/secretsmanager/models.py
index <HASH>..<HASH> 100644
--- a/moto/secretsmanager/models.py
+++ b/moto/secretsmanager/models.py
@@ -42,6 +42,9 @@ class SecretsManagerBackend(BaseBackend):
         self.__dict__ = {}
         self.__init__(region_name)

+    def _is_valid_identifier(self, identifier):
+        return identifier in (self.name, self.secret_id)
+
     def get_secret_value(self, secret_id, version_id, version_stage):

         if self.secret_id == '':
@@ -74,12 +77,12 @@ class SecretsManagerBackend(BaseBackend):
         return response

     def describe_secret(self, secret_id):
-        if self.secret_id == '':
+        if not self._is_valid_identifier(secret_id):
             raise ResourceNotFoundException

         response = json.dumps({
             "ARN": secret_arn(self.region, self.secret_id),
-            "Name": self.secret_id,
+            "Name": self.name,
             "Description": "",
             "KmsKeyId": "",
             "RotationEnabled": self.rotation_enabled,
Issue <I>: Add support for DescribeSecret

- Add helper method to validate the secret identifier from the client.
- Update describe_secret to use new helper method.
- Insert friendly name into "Name" field of returned description (was SecretId).

***Assumes acceptance of PR <I>.
py
diff --git a/proso_models/models.py b/proso_models/models.py
index <HASH>..<HASH> 100644
--- a/proso_models/models.py
+++ b/proso_models/models.py
@@ -14,7 +14,7 @@ from decorator import cache_environment_for_item


 # This is hack to emulate TRUE value on both psql and sqlite
-DATABASE_TRUE = '1 == 1'
+DATABASE_TRUE = '1 = 1'


 ################################################################################
use single "=" character for emulating TRUE value
py
diff --git a/ravel.py b/ravel.py
index <HASH>..<HASH> 100644
--- a/ravel.py
+++ b/ravel.py
@@ -147,8 +147,9 @@ class Bus :
             self
     #end __new__

-    def attach_asyncio(loop = None) :
+    def attach_asyncio(self, loop = None) :
         self.connection.attach_asyncio(loop)
+        self.loop = self.connection.loop
         return \
             self
     #end attach_asyncio
Bus.attach_asyncio() now works
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@ setup(
     download_url = "",

     classifiers = [
-        'Development Status :: 2 - Pre-Alpha',
+        'Development Status :: 4 - Beta',
         'Operating System :: POSIX :: Linux',
         'Operating System :: MacOS :: MacOS X',
         'Operating System :: Microsoft :: Windows',
@@ -42,7 +42,7 @@ setup(
         'Topic :: Software Development :: Libraries :: Python Modules',
         'License :: OSI Approved :: MIT License',
     ],
-    keywords = [],
+    keywords = ["PGP", "pgp", "Pretty Good Privacy", "GPG", "gpg", "OpenPGP"],
     install_requires = [ str(ir.req) for ir in reqs ],
changed Development Status to Beta and added some keywords
py
diff --git a/tools/c7n_org/c7n_org/cli.py b/tools/c7n_org/c7n_org/cli.py
index <HASH>..<HASH> 100644
--- a/tools/c7n_org/c7n_org/cli.py
+++ b/tools/c7n_org/c7n_org/cli.py
@@ -484,13 +484,15 @@ def accounts_iterator(config):
         d = {'account_id': a['subscription_id'],
              'name': a.get('name', a['subscription_id']),
              'regions': ['global'],
-             'tags': a.get('tags', ())}
+             'tags': a.get('tags', ()),
+             'vars': a.get('vars', {})}
         yield d

     for a in config.get('projects', ()):
         d = {'account_id': a['project_id'],
              'name': a.get('name', a['project_id']),
              'regions': ['global'],
-             'tags': a.get('tags', ())}
+             'tags': a.get('tags', ()),
+             'vars': a.get('vars', {})}
         yield d
tools/c7n_org - fix vars usage for gcp and azure accounts (#<I>)
py
diff --git a/spyderlib/plugins/ipythonconsole.py b/spyderlib/plugins/ipythonconsole.py
index <HASH>..<HASH> 100644
--- a/spyderlib/plugins/ipythonconsole.py
+++ b/spyderlib/plugins/ipythonconsole.py
@@ -510,7 +510,7 @@ class IPythonConsole(SpyderPluginWidget):
     def get_plugin_actions(self):
         """Return a list of actions related to plugin"""
         create_client_action = create_action(self,
-                                _("Open an IPython console"),
+                                _("Open an &IPython console"),
                                 None, 'ipython_console.png',
                                 triggered=self.create_new_client)

@@ -520,10 +520,10 @@ class IPythonConsole(SpyderPluginWidget):
             _("Open a new IPython client connected to an external kernel"),
             triggered=self.create_client_for_kernel)

-        # Add the action to the 'Interpreters' menu on the main window
-        consoles_menu_actions = [create_client_action, None,
-                                 connect_to_kernel_action]
-        self.main.consoles_menu_actions += consoles_menu_actions
+        # Add the action to the 'Consoles' menu on the main window
+        main_consoles_menu = self.main.consoles_menu_actions
+        main_consoles_menu.insert(0, create_client_action)
+        main_consoles_menu += [None, connect_to_kernel_action]

         # Plugin actions
         self.menu_actions = [create_client_action, connect_to_kernel_action]
Consoles menu: Move "Open an IPython console" to be its first option
py
diff --git a/metaseq/rebin.py b/metaseq/rebin.py
index <HASH>..<HASH> 100644
--- a/metaseq/rebin.py
+++ b/metaseq/rebin.py
@@ -1,6 +1,4 @@
 import numpy as np
-def rebin(y, bins):
-    len_y = y.shape[0]
-    x = np.arange(len_y)
-    xi = np.linspace(0, len_y, bins)
-    return np.interp(xi, x, y)
+def rebin(x, y, nbin):
+    xi = np.linspace(x.min(), x.max(), nbin)
+    return xi, np.interp(xi, x, y)
much simpler rebin (interpolate)
py
diff --git a/signalr_aio/transports/_transport.py b/signalr_aio/transports/_transport.py
index <HASH>..<HASH> 100644
--- a/signalr_aio/transports/_transport.py
+++ b/signalr_aio/transports/_transport.py
@@ -4,6 +4,12 @@
 # signalr_aio/transports/_transport.py
 # Stanislav Lazarov

+# python compatiblity for <3.6
+try:
+    ModuleNotFoundError
+except NameError:
+    ModuleNotFoundError = ImportError
+
 # -----------------------------------
 # Internal Imports
 from ._exceptions import ConnectionClosed
allow for backwards compatibility (<I>)
py
diff --git a/tests/providers/date_time.py b/tests/providers/date_time.py
index <HASH>..<HASH> 100644
--- a/tests/providers/date_time.py
+++ b/tests/providers/date_time.py
@@ -6,6 +6,7 @@ import unittest

 from faker import Factory
 from .. import string_types

+
 class TestHuHU(unittest.TestCase):

     """ Tests date_time in hu_HU locale. """
@@ -20,3 +21,41 @@ class TestHuHU(unittest.TestCase):

         month = self.factory.month()
         assert isinstance(month, string_types)
+
+
+class TestPlPL(unittest.TestCase):
+
+    DAY_NAMES = (
+        'poniedziałek',
+        'wtorek',
+        'środa',
+        'czwartek',
+        'piątek',
+        'sobota',
+        'niedziela',
+    )
+
+    MONTH_NAMES = (
+        'styczeń',
+        'luty',
+        'marzec',
+        'kwiecień',
+        'maj',
+        'czerwiec',
+        'lipiec',
+        'sierpień',
+        'wrzesień',
+        'październik',
+        'listopad',
+        'grudzień'
+    )
+
+    def setUp(self):
+        self.factory = Factory.create('pl_PL')
+
+    def test_day(self):
+        day = self.factory.day_of_week()
+        assert day in self.DAY_NAMES
+
+    def test_month(self):
+        month = self.factory.month_name()
+        assert month in self.MONTH_NAMES
Add missing test for pl_PL day-of-week and month name
py
diff --git a/webview/platforms/winforms.py b/webview/platforms/winforms.py
index <HASH>..<HASH> 100644
--- a/webview/platforms/winforms.py
+++ b/webview/platforms/winforms.py
@@ -541,10 +541,8 @@ def _set_ie_mode():


 def _allow_localhost():
-    from subprocess import check_output
-
-    output = check_output('checknetisolation LoopbackExempt -s')
-
+    output = os.popen('checknetisolation LoopbackExempt -s').read()
+
     if 'cw5n1h2txyewy' not in str(output):
         windll.shell32.ShellExecuteW(None, 'runas', 'checknetisolation', 'LoopbackExempt -a -n=\"Microsoft.Win32WebViewHost_cw5n1h2txyewy\"', None, 1)
[Winforms] Fix pyinstaller nowindow issue
py
diff --git a/test_path.py b/test_path.py
index <HASH>..<HASH> 100644
--- a/test_path.py
+++ b/test_path.py
@@ -30,6 +30,7 @@ import importlib

 import pytest

+import path
 from path import Path, tempdir
 from path import CaseInsensitivePattern as ci
 from path import SpecialResolver
@@ -351,6 +352,8 @@ class TestScratchDir:
             except:
                 pass

+    @pytest.mark.xfail(platform.system() == 'Linux' and path.PY2,
+                       reason="Can't decode bytes in FS. See #121")
     def test_listdir_other_encoding(self, tmpdir):
         """
         Some filesystems allow non-character sequences in path names.
Mark test as xfail where it's no longer supported. Ref #<I>
py
diff --git a/multiselectfield/validators.py b/multiselectfield/validators.py
index <HASH>..<HASH> 100644
--- a/multiselectfield/validators.py
+++ b/multiselectfield/validators.py
@@ -16,7 +16,7 @@

 from django.core import validators
-from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _


 class MaxValueMultiFieldValidator(validators.MaxLengthValidator):
Update lazy translation import (#<I>)

`ugettext_lazy` is deprecated as of Django <I> (to be removed in Django <I>)
py
diff --git a/GPy/core/model.py b/GPy/core/model.py
index <HASH>..<HASH> 100644
--- a/GPy/core/model.py
+++ b/GPy/core/model.py
@@ -188,19 +188,23 @@ class model(parameterised):

         """
         initial_parameters = self._get_params_transformed()

         if parallel:
-            jobs = []
-            pool = mp.Pool(processes=num_processes)
-            for i in range(Nrestarts):
-                self.randomize()
-                job = pool.apply_async(opt_wrapper, args = (self,), kwds = kwargs)
-                jobs.append(job)
-
-            pool.close() # signal that no more data coming in
-            pool.join() # wait for all the tasks to complete
+            try:
+                jobs = []
+                pool = mp.Pool(processes=num_processes)
+                for i in range(Nrestarts):
+                    self.randomize()
+                    job = pool.apply_async(opt_wrapper, args = (self,), kwds = kwargs)
+                    jobs.append(job)
+
+                pool.close() # signal that no more data coming in
+                pool.join() # wait for all the tasks to complete
+            except KeyboardInterrupt:
+                print "Ctrl+c received, terminating and joining pool."
+                pool.terminate()
+                pool.join()

         for i in range(Nrestarts):
             try:
made parallel optimize_restart responsive to ctrl+c
py
diff --git a/pywws/toservice.py b/pywws/toservice.py
index <HASH>..<HASH> 100644
--- a/pywws/toservice.py
+++ b/pywws/toservice.py
@@ -22,7 +22,7 @@ class ToService(object):
         self.logger = logging.getLogger('pywws.%s' % self.__class__.__name__)
         self.params = params
         self.data = calib_data
-        self.old_result = None
+        self.old_response = None
         self.old_ex = None
         # set default socket timeout, so urlopen calls don't hang forever
         socket.setdefaulttimeout(10)
@@ -84,14 +84,16 @@ class ToService(object):
             wudata = urllib.urlopen(server, coded_data)
             response = wudata.readlines()
             wudata.close()
+            if response != self.old_response:
+                for line in response:
+                    self.logger.error(line)
+                self.old_response = response
             if not response:
                 # Met office returns empty array on success
                 return True
-            for line in response:
+            if response[0] == 'success\n':
                 # Weather Underground returns 'success' string
-                if line == 'success\n':
-                    return True
-                self.logger.error(line)
+                return True
         except Exception, ex:
             e = str(ex)
             if e != self.old_ex:
Slightly rearranged error reporting in toservice.py. Should reduce chance of multiple repeated messages in the log.
py
diff --git a/napalm/base/helpers.py b/napalm/base/helpers.py
index <HASH>..<HASH> 100644
--- a/napalm/base/helpers.py
+++ b/napalm/base/helpers.py
@@ -248,7 +248,7 @@ def textfsm_extractor(cls, template_name, raw_text):
 def find_txt(xml_tree, path, default="", namespaces=None):
     """
     Extracts the text value from an XML tree, using XPath.
-    In case of error, will return a default value.
+    In case of error or text element unavailability, will return a default value.

     :param xml_tree: the XML Tree object. Assumed is <type 'lxml.etree._Element'>.
     :param path: XPath to be applied, in order to extract the desired data.
@@ -265,7 +265,10 @@ def find_txt(xml_tree, path, default="", namespaces=None):
         if xpath_length and xpath_applied[0] is not None:
             xpath_result = xpath_applied[0]
             if isinstance(xpath_result, type(xml_tree)):
-                value = xpath_result.text.strip()
+                if xpath_result.text:
+                    value = xpath_result.text.strip()
+                else:
+                    value = default
             else:
                 value = xpath_result
         else:
Make lxml text retrieval not fail if no text is available (#<I>)
py
diff --git a/pyinfra/api/connectors/docker.py b/pyinfra/api/connectors/docker.py
index <HASH>..<HASH> 100644
--- a/pyinfra/api/connectors/docker.py
+++ b/pyinfra/api/connectors/docker.py
@@ -7,8 +7,6 @@ import six

 from six.moves import shlex_quote

-import pyinfra
-
 from pyinfra import local, logger
 from pyinfra.api.exceptions import InventoryError
 from pyinfra.api.util import get_file_io, memoize
@@ -47,9 +45,6 @@ def connect(state, host, for_fact=None):


 def disconnect(state, host):
-    if not pyinfra.is_cli:
-        return
-
     container_id = host.host_data['docker_container_id'][:12]

     with progress_spinner({'docker commit'}):
Always disconnect/commit the Docker container (remove CLI check).
py
diff --git a/kmip/__init__.py b/kmip/__init__.py
index <HASH>..<HASH> 100644
--- a/kmip/__init__.py
+++ b/kmip/__init__.py
@@ -15,6 +15,8 @@

 import os
 import re
+import sys
+import warnings

 from kmip.core import enums
 from kmip.pie import client
@@ -44,3 +46,22 @@
     'objects',
     'services'
 ]
+
+
+if sys.version_info[:2] == (2, 7):
+    warnings.warn(
+        (
+            "PyKMIP will drop support for Python 2.7 in a future release. "
+            "Please upgrade to a newer version of Python (3.5+ preferred)."
+        ),
+        PendingDeprecationWarning
+    )
+
+if sys.version_info[:2] == (3, 4):
+    warnings.warn(
+        (
+            "PyKMIP will drop support for Python 3.4 in a future release. "
+            "Please upgrade to a newer version of Python (3.5+ preferred)."
+        ),
+        PendingDeprecationWarning
+    )
Add pending deprecation warnings for Python <I> and <I>

This change adds pending deprecation warnings for both Python <I> and <I>. Both of these Python versions have reached end-of-life and no longer receive security updates. Future versions of PyKMIP will drop support for both of these Python versions.
py
diff --git a/indra/sources/eidos/migration_table_processor.py b/indra/sources/eidos/migration_table_processor.py
index <HASH>..<HASH> 100644
--- a/indra/sources/eidos/migration_table_processor.py
+++ b/indra/sources/eidos/migration_table_processor.py
@@ -85,9 +85,7 @@ def make_stmt(row_dict):
     return stmt


-if __name__ == '__main__':
-    fname = 'Initial annotation exercise for migration use case.xlsx'
-
+def process_workbook(fname):
     wb = openpyxl.load_workbook(fname, read_only=True)
     sheets = wb.sheetnames
     cag_sheets = [s for s in sheets if 'CAG' in s]
@@ -97,6 +95,4 @@ if __name__ == '__main__':
         sheet = wb[sheet_name]
         new_stmts = process_sheet(sheet)
         statements += new_stmts
-
-    with open('stmts_from_migration_table.pkl', 'wb') as f:
-        pickle.dump(statements, f)
+    return statements
Refactor main into a function and remove specifics
py
diff --git a/kafka/consumer.py b/kafka/consumer.py
index <HASH>..<HASH> 100644
--- a/kafka/consumer.py
+++ b/kafka/consumer.py
@@ -219,11 +219,6 @@ class SimpleConsumer(object):
         start a new batch unless we've reached the end of ths partition.
         """

-        # Unless it is the first message in the queue, we have to fetch
-        # the next one
-        if offset != 0:
-            offset += 1
-
         while True:
             req = FetchRequest(self.topic, partition, offset, 1024) # TODO configure fetch size
             (resp,) = self.client.send_fetch_request([req])
Removing the bit about offsets
py
diff --git a/abydos/distance/_token_distance.py b/abydos/distance/_token_distance.py
index <HASH>..<HASH> 100644
--- a/abydos/distance/_token_distance.py
+++ b/abydos/distance/_token_distance.py
@@ -557,9 +557,6 @@ class _TokenDistance(_Distance):

         for src_tok in src_only:
             for tar_tok in tar_only:
-                # TODO: should sim be divided by 2? should this be multiplied
-                # by the bag value? should it really be every token in both
-                # sets that we compare?
                 sim = self.params['metric'].sim(src_tok, tar_tok)
                 if sim >= self.params['threshold']:
                     intersection[src_tok] += (sim / 2) * src_only[src_tok]
@@ -689,8 +686,6 @@ class _TokenDistance(_Distance):
         for row, col in assignments.keys():
             sim = orig_sim[row, col]

-            # TODO: should sim be divided by 2? should this be multiplied
-            # by the bag value?
             if sim >= self.params['threshold']:
                 intersection[src_only[col]] += (sim / 2) * (
                     self._src_tokens - self._tar_tokens
Removed TODOs, moving them to GitHub issues
py
diff --git a/asyncio_xmpp/plugins/base.py b/asyncio_xmpp/plugins/base.py
index <HASH>..<HASH> 100644
--- a/asyncio_xmpp/plugins/base.py
+++ b/asyncio_xmpp/plugins/base.py
@@ -4,7 +4,7 @@

 This module provides a base class useful for plugin development.

-.. autoclass:: Service
+.. autoclass:: Service([node...], [loop=None], [logger=None])

 """
Make the signature of Service base class nicer to read in docs
py
diff --git a/pyontutils/docs.py b/pyontutils/docs.py
index <HASH>..<HASH> 100644
--- a/pyontutils/docs.py
+++ b/pyontutils/docs.py
@@ -28,6 +28,7 @@ def getMdReadFormat():

 md_read_format = getMdReadFormat()

+# NOTE if emacs does not point to /usr/bin/emacs or similar this will fail
 compile_org_file = ['emacs', '-q', '-l',
                     Path(devconfig.git_local_base, 'orgstrap/init.el').resolve().as_posix(),
                     '--batch', '-f', 'compile-org-file']
 theme = Path(devconfig.ontology_local_repo, 'docs', 'theme-readtheorg.setup')
@@ -94,7 +95,7 @@ def renderMarkdown(path, title=None, authors=None, date=None, **kwargs):
             # if this happens direct stderr to stdout to get the message
             raise subprocess.CalledProcessError(e.returncode, ' '.join(e.args) + f' {path.as_posix()}') from ValueError(err.decode())

-    if not body:
+    if not body or b'*temp*' in body:
         raise ValueError(f'Output document for {path.as_posix()} '
                          'has no body! the input org was:\n'
                          f'{org.decode()}')
docs: check for *temp* title indicative of issues
py
diff --git a/ibis/client.py b/ibis/client.py index <HASH>..<HASH> 100644 --- a/ibis/client.py +++ b/ibis/client.py @@ -598,7 +598,9 @@ class ImpalaClient(SQLClient): stmt = ddl.CreateTableDelimited(name, hdfs_dir, schema, database=database, delimiter=delimiter, - external=external) + external=external, + lineterminator=lineterminator, + escapechar=escapechar) self._execute(stmt) return self._wrap_new_table(qualified_name, persist)
Don't ignore escapechar and lineterminator. Pass kwargs through to CreateTableDelimited from ImpalaClient.delimited_file. Fixes #<I>
py
diff --git a/dvc/version.py b/dvc/version.py index <HASH>..<HASH> 100644 --- a/dvc/version.py +++ b/dvc/version.py @@ -6,7 +6,7 @@ import os import subprocess -_BASE_VERSION = "2.0.5" +_BASE_VERSION = "2.0.6" def _generate_version(base_version):
dvc: bump to <I>
py
diff --git a/cfgrib/messages.py b/cfgrib/messages.py index <HASH>..<HASH> 100644 --- a/cfgrib/messages.py +++ b/cfgrib/messages.py @@ -35,8 +35,13 @@ try: import eccodes else: from . import bindings as eccodes -except RuntimeError: - import pyeccodes.compat as eccodes +except RuntimeError as exc: + # hide the pyeccodes import error from the majority of the users + # that have problems with the ecCodes bindings + try: + import pyeccodes.compat as eccodes + except ImportError: + raise exc eccodes_version = eccodes.codes_get_api_version()
Hide the pyeccodes import error that is confusing to most users
py
diff --git a/pylint/checkers/python3.py b/pylint/checkers/python3.py index <HASH>..<HASH> 100644 --- a/pylint/checkers/python3.py +++ b/pylint/checkers/python3.py @@ -360,6 +360,11 @@ class Python3Checker(checkers.BaseChecker): '__cmp__', ]) + def __init__(self, *args, **kwargs): + self._future_division = False + self._future_absolute_import = False + super(Python3Checker, self).__init__(*args, **kwargs) + def visit_module(self, node): # pylint: disable=unused-argument """Clear checker state after previous module.""" self._future_division = False
Backed out changeset c1f2f0c<I>b0. This is actually used by the tests, which aren't calling visit_module.
py
diff --git a/lib/search_engine_utils.py b/lib/search_engine_utils.py index <HASH>..<HASH> 100644 --- a/lib/search_engine_utils.py +++ b/lib/search_engine_utils.py @@ -34,6 +34,10 @@ def get_fieldvalues(recIDs, tag, repetitive_values=True): only. """ out = [] + try: + recIDs = int(recIDs) + except: + pass if isinstance(recIDs, (int, long)): recIDs = [recIDs,] if not isinstance(recIDs, (list, tuple)):
WebSearch: fix get_fieldvalues() when recid is str. * recid might be a string, so try to convert it to an int in the get_fieldvalues function (needed for WebSubmit, a major user of this function, which calls it in many different places with a string argument instead of an int). * Note: this was committed previously in <I>bb<I>ce<I>b<I>b<I>f<I>c<I>a but was apparently dropped by mishap later.
py
diff --git a/hotdoc/core/doc_repo.py b/hotdoc/core/doc_repo.py index <HASH>..<HASH> 100644 --- a/hotdoc/core/doc_repo.py +++ b/hotdoc/core/doc_repo.py @@ -437,7 +437,12 @@ class DocRepo(object): """ Banana banana """ - self.output = os.path.abspath(self.config.get('output')) + output = self.config.get('output', None) + if output is not None: + self.output = os.path.abspath(output) + else: + self.output = None + self.project_name = self.config.get('project_name', None) self.project_version = self.config.get('project_version', None) self.output_format = self.config.get('output_format')
doc_repo: allow None for output again. That was a regression
py
diff --git a/tests/basics/boundmeth1.py b/tests/basics/boundmeth1.py index <HASH>..<HASH> 100644 --- a/tests/basics/boundmeth1.py +++ b/tests/basics/boundmeth1.py @@ -22,3 +22,9 @@ print(m(1)) # bound method with lots of extra args m = A().h print(m(1, 2, 3, 4, 5, 6)) + +# can't assign attributes to a bound method +try: + A().f.x = 1 +except AttributeError: + print('AttributeError')
tests/basics: Add test for assignment of attribute to bound method.
py
diff --git a/pysat/_orbits.py b/pysat/_orbits.py index <HASH>..<HASH> 100644 --- a/pysat/_orbits.py +++ b/pysat/_orbits.py @@ -31,6 +31,10 @@ class Orbits(object): class should not be called directly by the user, use the interface provided by inst.orbits where inst = pysat.Instrument() + Warning + ------- + This class is still under development. + Examples -------- ::
Added warning that orbits is still under development.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,12 @@ """bum - setup.py""" import setuptools +import sys try: import bum except (ImportError, SyntaxError): print("error: bum requires Python 3.6 or greater.") - quit(1) + sys.exit(1) try:
Fix erroneous use of quit()
py
diff --git a/ryu/ofproto/ofproto_v1_2_parser.py b/ryu/ofproto/ofproto_v1_2_parser.py index <HASH>..<HASH> 100644 --- a/ryu/ofproto/ofproto_v1_2_parser.py +++ b/ryu/ofproto/ofproto_v1_2_parser.py @@ -1557,16 +1557,6 @@ class OFPActionSetField(OFPAction): return not hasattr(self, 'value') def to_jsondict(self): - # XXX old api compat - if self._composed_with_old_api(): - # copy object first because serialize_old is destructive - o2 = OFPActionSetField(self.field) - # serialize and parse to fill new fields - buf = bytearray() - o2.serialize(buf, 0) - o = OFPActionSetField.parser(six.binary_type(buf), 0) - else: - o = self return { self.__class__.__name__: { 'field': ofproto.oxm_to_jsondict(self.key, self.value),
ofproto/ofproto_v1_2_parser: Flake8 Fixes
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -19,11 +19,12 @@ setup( url="https://github.com/horazont/aioxmpp", author="Jonas Wielicki", author_email="[email protected]", - license="Apache20", + license="GPLv3", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Operating System :: POSIX", + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4",
Fix license information in setup.py
py
diff --git a/pyvista/core/common.py b/pyvista/core/common.py index <HASH>..<HASH> 100644 --- a/pyvista/core/common.py +++ b/pyvista/core/common.py @@ -287,13 +287,12 @@ class Common(DataSetFilters, DataObject): def active_scalars_info(self): """Return the active scalar's field and name: [field, name].""" field, name = self._active_scalars_info - exclude = {'__custom_rgba', 'Normals', 'vtkOriginalPointIds', 'TCoords'} - if name in exclude: name = self._last_active_scalars_name - if name is None and self.n_arrays > 0: + all_arrays = self.point_arrays.keys() + self.cell_arrays.keys() + if name is None or name not in all_arrays: # find first available array name for attributes in (self.point_arrays, self.cell_arrays): first_arr = next((arr for arr in attributes if arr not in exclude), None) @@ -301,6 +300,8 @@ class Common(DataSetFilters, DataObject): self._active_scalars_info = ActiveArrayInfo(attributes.association, first_arr) attributes.active_scalars = first_arr break + else: + self._active_scalars_info = ActiveArrayInfo(field, None) return self._active_scalars_info @property
Fix failing test when accessing active_scalars_info after clearing arrays. (#<I>)
py
diff --git a/src/Exscript/protocols/drivers/one_os.py b/src/Exscript/protocols/drivers/one_os.py index <HASH>..<HASH> 100644 --- a/src/Exscript/protocols/drivers/one_os.py +++ b/src/Exscript/protocols/drivers/one_os.py @@ -13,7 +13,7 @@ # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ -A driver for Cisco IOS (not IOS XR). +A driver for OneOS (OneAccess). """ import re from driver import Driver
fix: typo in OneOS API docs.
py
diff --git a/test.py b/test.py index <HASH>..<HASH> 100644 --- a/test.py +++ b/test.py @@ -274,6 +274,19 @@ class TestFifoLock(unittest.TestCase): self.assertEqual(acquisition_history[2], [True, True, True]) @async_test + async def test_semaphore_complete_in_order(self): + lock = FifoLock() + Semaphore = type('Semaphore', (SemaphoreBase, ), {'size': 2}) + + acquisition_history = await mutate_tasks_in_sequence(create_lock_tasks( + lock(Semaphore), lock(Semaphore), lock(Semaphore)), + complete(0), complete(1), complete(2), + ) + + self.assertEqual(acquisition_history[0], [True, True, False]) + self.assertEqual(acquisition_history[1], [True, True, True]) + + @async_test async def test_semaphore_complete_out_of_order(self): lock = FifoLock() Semaphore = type('Semaphore', (SemaphoreBase, ), {'size': 2})
(tests) Ensure that semaphores finishing in order behave as expected
py
diff --git a/salt/states/git.py b/salt/states/git.py index <HASH>..<HASH> 100644 --- a/salt/states/git.py +++ b/salt/states/git.py @@ -2213,7 +2213,7 @@ def detached(name, # Determine if supplied ref is a hash remote_rev_type = 'ref' - if len(ref) <= 40 \ + if len(rev) <= 40 \ and all(x in string.hexdigits for x in rev): rev = rev.lower() remote_rev_type = 'hash' @@ -2419,7 +2419,7 @@ def detached(name, https_pass=https_pass, ignore_retcode=False) - if 'refs/remotes/'+remote+'/'+ref in all_remote_refs: + if 'refs/remotes/'+remote+'/'+rev in all_remote_refs: checkout_commit_id = all_remote_refs['refs/remotes/' + remote + '/' + rev] elif 'refs/tags/' + rev in all_remote_refs: checkout_commit_id = all_remote_refs['refs/tags/' + rev]
Update old "ref" references to "rev" in git.detached state Fixes #<I> The "rev" kwarg was added to replace "ref" in #<I>, however, when switching the state over to "rev", some stacktraces occur due to some remaining "ref" references.
py
diff --git a/odl/test/discr/tensor_ops_test.py b/odl/test/discr/tensor_ops_test.py index <HASH>..<HASH> 100644 --- a/odl/test/discr/tensor_ops_test.py +++ b/odl/test/discr/tensor_ops_test.py @@ -89,6 +89,12 @@ def test_pointwise_norm_init_properties(): with pytest.raises(ValueError): PointwiseNorm(vfspace, exponent=0.5) # < 1 not allowed + with pytest.raises(ValueError): + PointwiseNorm(vfspace, weight=-1) # < 0 not allowed + + with pytest.raises(ValueError): + PointwiseNorm(vfspace, weight=[1, 0, 1]) # 0 invalid + def test_pointwise_norm_real(exponent): # 1d
TST: Add test for PointwiseNorm weight being negative
py
diff --git a/icon_font_to_png/icon_font_downloader.py b/icon_font_to_png/icon_font_downloader.py index <HASH>..<HASH> 100644 --- a/icon_font_to_png/icon_font_downloader.py +++ b/icon_font_to_png/icon_font_downloader.py @@ -102,10 +102,10 @@ class OcticonsDownloader(IconFontDownloader): Project page: https://octicons.github.com/ """ - css_url = ('https://raw.githubusercontent.com/github/' - 'octicons/master/octicons/octicons.css') - ttf_url = ('https://raw.githubusercontent.com/github/' - 'octicons/master/octicons/octicons.ttf') + css_url = ('https://raw.githubusercontent.com/primer/' + 'octicons/master/build/font/octicons.css') + ttf_url = ('https://raw.githubusercontent.com/primer/' + 'octicons/master/build/font/octicons.ttf') def get_latest_version_number(self): return self._get_latest_tag_from_github(
Octicons changed their GitHub owner
py