Columns:
  diff — string, lengths 139 to 3.65k
  message — string, lengths 8 to 627
  diff_languages — string, 1 class (py)
diff --git a/cloudvolume/datasource/precomputed/mesh/unsharded.py b/cloudvolume/datasource/precomputed/mesh/unsharded.py
index <HASH>..<HASH> 100644
--- a/cloudvolume/datasource/precomputed/mesh/unsharded.py
+++ b/cloudvolume/datasource/precomputed/mesh/unsharded.py
@@ -192,8 +192,7 @@ class UnshardedLegacyPrecomputedMeshSource(object):
     Supported Formats: 'obj', 'ply', 'precomputed'
     """
-    if type(segids) != list:
-      segids = [segids]
+    segids = toiter(segids)
 
     mesh = self.get(segids, fuse=True, remove_duplicate_vertices=True)
refactor: use more robust toiter
py
diff --git a/tests/test_notification.py b/tests/test_notification.py
index <HASH>..<HASH> 100644
--- a/tests/test_notification.py
+++ b/tests/test_notification.py
@@ -1,4 +1,4 @@
-import mock
+from unittest import mock
 
 from jaraco.net import notification
 
 class TestMailbox(object):
Assume mock is available in the stdlib
py
diff --git a/tests/unit/modules/test_state.py b/tests/unit/modules/test_state.py
index <HASH>..<HASH> 100644
--- a/tests/unit/modules/test_state.py
+++ b/tests/unit/modules/test_state.py
@@ -857,14 +857,10 @@ class StateTestCase(TestCase, LoaderModuleMockMixin):
                                 True),
                 ["A"])
 
-        mock = MagicMock(side_effect=[False,
-                                      True,
-                                      True,
-                                      True,
-                                      True])
-        with patch.object(state, '_check_pillar', mock):
+        mock = MagicMock(side_effect=[['E', '1'], None, None, None, None])
+        with patch.object(state, '_get_pillar_errors', mock):
             with patch.dict(state.__context__, {"retcode": 5}):
-                with patch.dict(state.__pillar__, {"_errors": "E1"}):
+                with patch.dict(state.__pillar__, {"_errors": ['E', '1']}):
                     self.assertListEqual(state.sls("core,edit.vim dev",
                                                    None,
                                                    None,
Fix unit test: wrong error types in side effect
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -390,6 +390,7 @@ setup (
                'bin/hdfcoinc/pycbc_page_snrchi',
                'bin/hdfcoinc/pycbc_page_segments',
                'bin/hdfcoinc/pycbc_page_segtable',
+               'bin/hdfcoinc/pycbc_page_segplot',
                'bin/hdfcoinc/pycbc_plot_psd_file',
                'bin/hdfcoinc/pycbc_plot_range',
                'bin/hdfcoinc/pycbc_foreground_censor',
Added segplot to setup.py.
py
diff --git a/pyvisa/compat/__init__.py b/pyvisa/compat/__init__.py
index <HASH>..<HASH> 100644
--- a/pyvisa/compat/__init__.py
+++ b/pyvisa/compat/__init__.py
@@ -51,3 +51,13 @@ try:
 except ImportError:
     from .check_output import check_output
 
+
+def with_metaclass(meta, *bases):
+    """Create a base class with a metaclass."""
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(meta):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, str('temporary_class'), (), {})
Added with_metaclass compatibility helper
py
diff --git a/daemonocle/helpers.py b/daemonocle/helpers.py
index <HASH>..<HASH> 100644
--- a/daemonocle/helpers.py
+++ b/daemonocle/helpers.py
@@ -18,6 +18,6 @@ class ExecWorker(Callable):
         if b'/' in self.prog:
             self.prog = posixpath.realpath(self.prog)
 
-    def __call__(self):
+    def __call__(self):  # pragma: no cover
         exec_prog = os.execv if self.prog[0] == b'/' else os.execvp
         exec_prog(self.prog, (self.prog,) + self.args)
Add "pragma: no cover" to function that can never return
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -24,7 +24,7 @@ class PyTest(TestCommand):
 
 setup(
     name='graphql-relay',
-    version='0.4.1',
+    version='0.4.2',
     description='Relay implementation for Python',
     long_description=open('README.rst').read(),
 
@@ -55,7 +55,7 @@ setup(
     install_requires=[
         'graphql-core>=0.5.0',
-        'pypromise>=0.4.0'
+        'promise>=0.4.0'
     ],
     tests_require=['pytest>=2.7.2'],
     extras_require={
Updated to version <I>. Use promise package
py
diff --git a/cwltool/job.py b/cwltool/job.py
index <HASH>..<HASH> 100644
--- a/cwltool/job.py
+++ b/cwltool/job.py
@@ -19,6 +19,8 @@ class Job(object):
         runtime = []
 
         if self.container and self.container.get("type") == "docker":
+            if "uri" in self.container:
+                subprocess.call("docker", "pull", self.container["uri"])
             runtime = ["docker", "run", "-i"]
             for d in self.pathmapper.dirs:
                 runtime.append("--volume=%s:%s:ro" % (d, self.pathmapper.dirs[d]))
Fix uri in bwa example. Cwltool now runs "docker pull" before running the job.
py
diff --git a/nidmresults/objects/inference.py b/nidmresults/objects/inference.py
index <HASH>..<HASH> 100644
--- a/nidmresults/objects/inference.py
+++ b/nidmresults/objects/inference.py
@@ -14,7 +14,6 @@ import shutil
 from generic import *
 import uuid
 from math import erf, sqrt
-from pandas import DataFrame
 
 
 class Inference(NIDMObject):
@@ -728,12 +727,3 @@ class Peak(NIDMObject):
             (PROV['location'], self.coordinate.id)])
 
         return self.p
-
-    def dataframe(self):
-        """
-        Create a dataframe
-        """
-        df = DataFrame(columns=('peak', 'coordinate', 'z', 'p_fwer'))
-        df.loc[0] = [self.id, self.coordinate.coord_vector, self.equiv_z,
-                     self.p_fwer]
-        return df
No longer export as a pandas DataFrame
py
diff --git a/pwkit/colormaps.py b/pwkit/colormaps.py
index <HASH>..<HASH> 100644
--- a/pwkit/colormaps.py
+++ b/pwkit/colormaps.py
@@ -20,8 +20,10 @@ colors. If the input array has shape S, the returned value has a shape (S +
 
 Example:
 
   data = np.array ([<things between 0 and 1>])
-  mapper = factory_map ('cubehelix_blue')
+  mapper = factory_map['cubehelix_blue']()
   rgb = mapper (data)
+  green_values = rgb[:,1]
+  last_rgb = rgb[-1]
 
 The basic colormap names are:
pwkit/colormaps.py: correct example invocation
py
diff --git a/units.py b/units.py
index <HASH>..<HASH> 100644
--- a/units.py
+++ b/units.py
@@ -80,7 +80,10 @@ def converter(val, current_unit, destination_unit):
     x = val
     for dic in unit_dicts:
         if current_unit in dic.keys() and destination_unit in dic.keys():
-            native = eval(dic[current_unit][0])
+            try:
+                native = eval(dic[current_unit][0])
+            except ZeroDivisionError:
+                native = np.inf
             x = native
     try:
         out = eval(dic[destination_unit][1])
Units converter try/except for conversion to native units
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -27,6 +27,7 @@ SETUP = {
         "charmhelpers.contrib.saltstack",
         "charmhelpers.contrib.hahelpers",
         "charmhelpers.contrib.jujugui",
+        "charmhelpers.contrib.templating",
     ],
     'scripts': [
         "bin/chlp",
Include templating package in setup.py.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -92,6 +92,7 @@ setup(
     ],
     install_requires=[
         "cffi>=1.0.0",
+        "enum34;python_version<'3.4'",
     ],
     cffi_modules=["src/brotlicffi/_build.py:ffi"],
     packages=find_packages('src'),
Explicitly depend on enum<I> for Python < <I>

Add an explicit dependency on enum<I> for Python < <I> (notably <I> that is still seemingly supported). This is necessary for brotlicffi to seamlessly replace brotlipy in deps still claiming to support py<I>.

The package's tests do not fail because enum<I> is brought indirectly by the test dependencies.
py
diff --git a/spacy/ja/__init__.py b/spacy/ja/__init__.py
index <HASH>..<HASH> 100644
--- a/spacy/ja/__init__.py
+++ b/spacy/ja/__init__.py
@@ -51,9 +51,10 @@ def resolve_pos(token):
     # PoS mappings.
 
     if token.part_of_speech == '連体詞,*,*,*':
-        # determiner-likes get DET, otherwise ADJ
         if re.match('^[こそあど此其彼]の', token.surface):
             return token.part_of_speech + ',DET'
+        if re.match('^[こそあど此其彼]', token.surface):
+            return token.part_of_speech + ',PRON'
         else:
             return token.part_of_speech + ',ADJ'
     return token.part_of_speech
Fix pronoun handling

Missed this case earlier. 連体詞 have three classes for UD purposes:

- その -> DET
- それ -> PRON
- 同じ -> ADJ

-POLM
py
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py
index <HASH>..<HASH> 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/views.py
@@ -2,7 +2,7 @@ from django.contrib import messages
 from django.contrib.auth import get_user_model
 from django.contrib.auth.mixins import LoginRequiredMixin
 from django.urls import reverse
-from django.utils.translation import ugettext_lazy as _
+from django.utils.translation import gettext_lazy as _
 from django.views.generic import DetailView, RedirectView, UpdateView
 
 User = get_user_model()
Update ugettext_lazy to gettext_lazy in views.py
py
diff --git a/nodeconductor/structure/admin.py b/nodeconductor/structure/admin.py
index <HASH>..<HASH> 100644
--- a/nodeconductor/structure/admin.py
+++ b/nodeconductor/structure/admin.py
@@ -84,9 +84,13 @@ class ProjectGroupAdmin(ProtectedModelMixin, ChangeReadonlyMixin, admin.ModelAdm
 class ServiceSettingsAdmin(admin.ModelAdmin):
     list_display = ('name', 'type', 'state')
     list_filter = ('type', 'state')
-    exclude = ('state',)
+    add_exclude = ('state',)
     actions = ['sync']
 
+    def add_view(self, *args, **kwargs):
+        self.exclude = getattr(self, 'add_exclude', ())
+        return super(ServiceSettingsAdmin, self).add_view(*args, **kwargs)
+
     def get_form(self, request, obj=None, **kwargs):
         form = super(ServiceSettingsAdmin, self).get_form(request, obj, **kwargs)
         form.base_fields['shared'].initial = True
Exclude state setting on creation only
py
diff --git a/gwpy/data/series.py b/gwpy/data/series.py
index <HASH>..<HASH> 100644
--- a/gwpy/data/series.py
+++ b/gwpy/data/series.py
@@ -317,6 +317,14 @@ class TimeSeries(NDData):
         return new
 
     @classmethod
+    def from_lal(cls, lalts):
+        """Generate a new TimeSeries from a LAL TimeSeries of any type
+        """
+        return cls(lalts.data.data, epoch=lalts.epoch, name=lalts.name,
+                   sample_rate=1/lalts.deltaT,
+                   unit=lal.swiglal.UnitToString(lalts.sampleUnits))
+
+    @classmethod
     def fetch(cls, channel, start, end, host=None, port=None):
         """Fetch data from NDS into a TimeSeries
gwpy.data.TimeSeries: add from_lal class method

- allows easy conversion from LAL XXXXTimeSeries
py
diff --git a/toytree/__init__.py b/toytree/__init__.py
index <HASH>..<HASH> 100644
--- a/toytree/__init__.py
+++ b/toytree/__init__.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 
-__version__ = "0.0.2"
+__version__ = "0.0.3"
 __author__ = "Deren Eaton"
"Updating toytree/__init__.py to version - <I>
py
diff --git a/turrentine/admin.py b/turrentine/admin.py
index <HASH>..<HASH> 100644
--- a/turrentine/admin.py
+++ b/turrentine/admin.py
@@ -106,6 +106,7 @@ class PageAdmin(admin_base_class):
         my_urls = patterns('',
             (r'^add/preview$', self.admin_site.admin_view(PagePreviewView.as_view())),
             (r'^(?P<id>\d+)/preview$', self.admin_site.admin_view(PagePreviewView.as_view())),
+            (r'^(?P<id>\d+)/history/(\d+)/preview$', self.admin_site.admin_view(PagePreviewView.as_view())),
         )
         return my_urls + urls
Ensure that iframe previews work on revision pages as well.
py
diff --git a/ipyrad/assemble/write_outputs.py b/ipyrad/assemble/write_outputs.py
index <HASH>..<HASH> 100644
--- a/ipyrad/assemble/write_outputs.py
+++ b/ipyrad/assemble/write_outputs.py
@@ -3,7 +3,7 @@
 # py2/3 compatibility
 from __future__ import print_function
 try:
-    from builtins import range
+    from builtins import range, bytes
     from itertools import izip, chain
 except ImportError:
     from itertools import chain
@@ -2508,7 +2508,8 @@ def subsample(snpsmap):
 
 
 
-AMBIGARR = np.array(list(b"RSKYWM")).astype(np.uint8)
+AMBIGARR = np.array(list(bytes(b"RSKYWM"))).astype(np.uint8)
+
 
 STATS_HEADER_1 = """
 ## The number of loci caught by each filter.
 ## ipyrad API location: [assembly].stats_dfs.s7_filters
bytes py2/3 matching
py
diff --git a/psiturk/experiment.py b/psiturk/experiment.py
index <HASH>..<HASH> 100644
--- a/psiturk/experiment.py
+++ b/psiturk/experiment.py
@@ -193,6 +193,7 @@ def check_worker_status():
     return jsonify(**resp)
 
 @app.route('/ad', methods=['GET'])
[email protected]('/pub', methods=['GET'])
 @nocache
 def advertisement():
     """
adding a `pub` alias for the `ad` route, to get around ad blockers. will need more work later to interface with the psiturk ad server
py
diff --git a/angr/analyses/vfg.py b/angr/analyses/vfg.py
index <HASH>..<HASH> 100644
--- a/angr/analyses/vfg.py
+++ b/angr/analyses/vfg.py
@@ -35,6 +35,9 @@ class VFG(Analysis, CFGBase):
         # It maps function key to its states
         self._function_initial_states = defaultdict(dict)
 
+        # All final states are put in this list
+        self.final_states = [ ]
+
         self.construct(function_start=function_start, interfunction_level=interfunction_level)
 
     def copy(self):
@@ -208,6 +211,11 @@ class VFG(Analysis, CFGBase):
         else:
             self._graph.add_edges_from(new_graph.edges(data=True))
 
+        # Determine the last basic block
+        for n in self._graph.nodes():
+            if self._graph.out_degree(n) == 0:
+                self.final_states.extend(n.successors)
+
     def _create_graph(self, return_target_sources=None):
         '''
         Create a DiGraph out of the existing edge map.
Added support for VFG.final_states.
py
diff --git a/tests/management/commands/test_admin_generator.py b/tests/management/commands/test_admin_generator.py
index <HASH>..<HASH> 100644
--- a/tests/management/commands/test_admin_generator.py
+++ b/tests/management/commands/test_admin_generator.py
@@ -38,13 +38,14 @@ Available apps:
         self.assertIn('from .models import Group', self.out.getvalue())
 
     def test_should_print_admin_class_with_date_hierarchy(self):
-        class TestModel(models.Model):
+        class TestAdminModel(models.Model):
             created_at = models.DateTimeField(auto_now_add=True)
             title = models.CharField(max_length=50)
 
             class Meta:
                 app_label = 'testapp'
 
-        call_command('admin_generator', 'testapp', 'TestModel', stdout=self.out)
+        call_command('admin_generator', 'testapp', 'TestAdminModel',
+                     stdout=self.out)
 
         self.assertIn("date_hierarchy = 'created_at'", self.out.getvalue())
Renamed TestModel to TestAdminModel
py
diff --git a/properties/base.py b/properties/base.py
index <HASH>..<HASH> 100644
--- a/properties/base.py
+++ b/properties/base.py
@@ -184,7 +184,9 @@ class HasProperties(with_metaclass(PropertyMetaclass, object)):
             listener.func(self, change)
 
     def _set(self, name, value):
-        self._notify(dict(name=name, value=value, mode='validate'))
+        out = self._notify(dict(name=name, value=value, mode='validate'))
+        if out is not None:
+            value = out
         if value is basic.undefined and name in self._backend:
             self._backend.pop(name)
         else:
Allow property validation functions to modify the value they validate

This occurs after Property validation but before the backend value on the HasProperties class is set.
py
diff --git a/gffutils/helpers.py b/gffutils/helpers.py
index <HASH>..<HASH> 100644
--- a/gffutils/helpers.py
+++ b/gffutils/helpers.py
@@ -61,10 +61,6 @@ def _choose_dialect(features):
     # NOTE: can use helpers.dialect_compare if you need to make this more
     # complex....
 
-    # For now, this function favors the first dialect, and then appends the
-    # order of additional fields seen in the attributes of other lines giving
-    # priority to dialects that come first in the iterable.
-
     if len(features) == 0:
         return constants.dialect
 
@@ -99,7 +95,7 @@ def _choose_dialect(features):
         # Increment the observed value by the number of attributes (so more
         # complex attribute strings have higher weight in determining
         # dialect)
-        count[k][v] = val + weight
+        count[k][v] = val + length
 
     final_dialect = {}
     for k, v in count.items():
@@ -111,6 +107,18 @@ def _choose_dialect(features):
         # key.
         final_dialect[k] = vs[0][0]
 
+    # For backwards compatibility, to figure out the field order to use for the
+    # dialect we append additional fields as they are observed, giving priority
+    # to attributes that come first in earlier features. The alternative would
+    # be to give preference to the most-common order of attributes.
+    final_order = []
+    for feature in features:
+        for o in feature.dialect["order"]:
+            if o not in final_order:
+                final_order.append(0)
+
+    final_dialect["order"] = final_order
+
     return final_dialect
maintain backwards compatibility with dialect order detection
py
diff --git a/src/pylexibank/db.py b/src/pylexibank/db.py
index <HASH>..<HASH> 100644
--- a/src/pylexibank/db.py
+++ b/src/pylexibank/db.py
@@ -179,6 +179,7 @@ def schema(ds):
                 c.separator,
                 cname == spec.primary_key,
                 cldf_name=c.header))
+        listvalued = {c.name: bool(c.separator) for c in table.tableSchema.columns}
         for fk in table.tableSchema.foreignKeys:
             if fk.reference.schemaReference:
                 # We only support Foreign Key references between tables!
@@ -187,6 +188,9 @@ def schema(ds):
             ref_type = ds.get_tabletype(ref)
             if ref_type:
                 colRefs = sorted(fk.columnReference)
+                if any(c in listvalued for c in colRefs):
+                    # We drop list-valued foreign keys
+                    continue
                 if spec.name in PROPERTY_URL_TO_COL:
                     # Must map foreign keys
                     colRefs = []
fix problem with listvalued fks (#<I>)
py
diff --git a/stripe/__init__.py b/stripe/__init__.py
index <HASH>..<HASH> 100644
--- a/stripe/__init__.py
+++ b/stripe/__init__.py
@@ -15,9 +15,9 @@ import types
 
 # Use cStringIO if it's available. Otherwise, StringIO is fine.
 try:
-  import cStringIO as StringIO
+    import cStringIO as StringIO
 except ImportError:
-  import StringIO
+    import StringIO
 
 # - Requests is the preferred HTTP library
 # - Google App Engine has urlfetch
Fix a small formatting inconsistency (four spaces instead of two).
py
diff --git a/treeherder/model/models.py b/treeherder/model/models.py
index <HASH>..<HASH> 100644
--- a/treeherder/model/models.py
+++ b/treeherder/model/models.py
@@ -558,16 +558,14 @@ class Job(models.Model):
             # classified this job.
             return
 
-        already_autoclassified = JobNote.objects.filter(failure_classification__name=classification, job=self).exists()
-        if already_autoclassified and user:
-            # Send event to NewRelic when a User verifies an autoclassified failure.
-            matches = (TextLogErrorMatch.objects.filter(text_log_error__step__job=self)
-                       .select_related('matcher'))
-            for match in matches:
-                newrelic.agent.record_custom_event('user_verified_classification', {
-                    'matcher': match.matcher.name,
-                    'job_id': self.id,
-                })
+        # Send event to NewRelic when a verifing an autoclassified failure.
+        matches = (TextLogErrorMatch.objects.filter(text_log_error__step__job=self)
+                   .select_related('matcher'))
+        for match in matches:
+            newrelic.agent.record_custom_event('user_verified_classification', {
+                'matcher': match.matcher.name,
+                'job_id': self.id,
+            })
 
         JobNote.create_autoclassify_job_note(job=self, user=user)
record event whenever a user verifies regardless of autoclassification
py
diff --git a/tensorflow_datasets/text/lm1b.py b/tensorflow_datasets/text/lm1b.py
index <HASH>..<HASH> 100644
--- a/tensorflow_datasets/text/lm1b.py
+++ b/tensorflow_datasets/text/lm1b.py
@@ -108,7 +108,7 @@ class Lm1b(tfds.core.GeneratorBasedBuilder):
       ),
       Lm1bConfig(
          name="subwords8k",
-          version="0.0.1",
+          version="0.0.2",
          description=("Uses `tfds.features.text.SubwordTextEncoder` with 8k "
                       "vocab size"),
          text_encoder_config=tfds.features.text.TextEncoderConfig(
      ),
@@ -117,7 +117,7 @@ class Lm1b(tfds.core.GeneratorBasedBuilder):
          name="subwords32k",
-          version="0.0.1",
+          version="0.0.2",
          description=("Uses `tfds.features.text.SubwordTextEncoder` with "
                       "32k vocab size"),
          text_encoder_config=tfds.features.text.TextEncoderConfig(
Increment LM1B version because subwords changed

PiperOrigin-RevId: <I>
py
diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py
index <HASH>..<HASH> 100644
--- a/riak/tests/test_all.py
+++ b/riak/tests/test_all.py
@@ -540,6 +540,7 @@ class BaseTestCase(object):
         # Immediate test to see if 2i is even supported w/ the backend
         try:
             self.client.index('foo','bar_bin','baz').run()
+            return True
         except Exception as e:
             if "indexes_not_supported" in str(e):
                 return False
Handling extra exit condition - success.
py
diff --git a/adnpy/api.py b/adnpy/api.py
index <HASH>..<HASH> 100644
--- a/adnpy/api.py
+++ b/adnpy/api.py
@@ -29,13 +29,14 @@ class API(requests.Session):
     """
 
     @classmethod
-    def build_api(cls, api_root='https://alpha-api.app.net/stream/0', access_token=None, verify_ssl=False):
+    def build_api(cls, api_root='https://alpha-api.app.net/stream/0', access_token=None, verify_ssl=False, headers=None):
         api = cls()
         api.api_root = api_root
         if access_token:
             api.add_authorization_token(access_token)
 
         api.verify_ssl = verify_ssl
+        api.headers = headers if headers else {}
 
         return api
 
@@ -44,6 +45,11 @@ class API(requests.Session):
         url = self.api_root + url
         kwargs['verify'] = self.verify_ssl
 
+        headers = {}
+        headers.update(self.headers)
+        headers.update(kwargs.get('headers', {}))
+
+        kwargs['headers'] = headers
         response = super(API, self).request(method, url, *args, **kwargs)
add in the ability to set host headers
py
diff --git a/www/tests/test_sys.py b/www/tests/test_sys.py
index <HASH>..<HASH> 100644
--- a/www/tests/test_sys.py
+++ b/www/tests/test_sys.py
@@ -90,7 +90,7 @@ if traces != expected:
             print('same line', i, 'traces', line1, 'expected', line2)
         else:
             print('diff line', i, 'traces', line1, 'expected', line2)
-            break
+            raise AssertionError('result is not the same as expected')
     else:
         print('remaining in traces\n', traces[i:],
               '\nremaining in expected', expected[i:])
In test_sys.py, raise an exception if trace test does not give the expected result
py
diff --git a/tryp/__init__.py b/tryp/__init__.py
index <HASH>..<HASH> 100644
--- a/tryp/__init__.py
+++ b/tryp/__init__.py
@@ -12,7 +12,7 @@ from tryp.anon import __
 from tryp.env_vars import env
 from tryp.task import Try
 
-development = False
+development = 'TRYP_DEVELOPMENT' in env
 
 integration_test = 'TRYP_INTEGRATION' in env
set development flag if TRYP_DEVELOPMENT is set
py
diff --git a/funcserver/funcserver.py b/funcserver/funcserver.py
index <HASH>..<HASH> 100644
--- a/funcserver/funcserver.py
+++ b/funcserver/funcserver.py
@@ -361,6 +361,7 @@ class RPCHandler(BaseHandler):
             fn=fn_name,
             args=args, kwargs=kwargs,
         )
+        r.update(self.server.define_common_tags())
         return r
 
     def _handle_call(self, request, fn, m, protocol):
@@ -640,8 +641,24 @@ class Server(BaseScript):
         '''
         return None
 
+    def define_common_tags(self):
+        '''
+        Define common key value pairs as a dictionary that will be
+        part of every response to the client
+        '''
+        return {}
+
+    def _validate_common_tags(self):
+        tags = self.define_common_tags()
+        for tag in ('result', 'success'):
+            if tag not in tags: continue
+
+            self.log.warning("bad common tag", name=tag)
+
     def run(self):
         """ prepares the api and starts the tornado funcserver """
+        self._validate_common_tags()
+
         self.log_id = 0
 
         # all active websockets and their state
references #<I>, response contains common tags
py
diff --git a/Lib/glyphs2ufo/builder.py b/Lib/glyphs2ufo/builder.py
index <HASH>..<HASH> 100644
--- a/Lib/glyphs2ufo/builder.py
+++ b/Lib/glyphs2ufo/builder.py
@@ -695,13 +695,7 @@ def add_features_to_ufo(ufo, feature_prefixes, classes, features):
         if disabled:
             lines.append('# disabled')
             lines.extend('#' + line for line in code.splitlines())
-            # empty features cause makeotf to fail, but empty instructions are fine
-            # so insert an empty instruction into any empty feature definitions
-            lines.append(';')
         else:
-            # see previous comment
-            if not code:
-                code = ';'
             lines.append(code)
         lines.append('} %s;' % name)
         feature_defs.append('\n'.join(lines))
[builder] Allow empty feature definitions

makeotf didn't allow these, but feaLib does.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,8 @@ setup(
     extras_require={
         'docs': [
             'sphinx~=4.2.0',
-            'sphinx-rtd-theme~=1.0.0'
+            'sphinx-rtd-theme~=1.0.0',
+            'sphinx-autodoc-typehints~=1.12.0'
         ],
         'test': [
            'pytest~=7.0',
Add missing dependency for auto-generating docs to package file.
py
diff --git a/tests/instrumentation/django_tests/template_tests.py b/tests/instrumentation/django_tests/template_tests.py
index <HASH>..<HASH> 100644
--- a/tests/instrumentation/django_tests/template_tests.py
+++ b/tests/instrumentation/django_tests/template_tests.py
@@ -100,7 +100,7 @@ class TracesTest(TestCase):
         kinds_dict = dict([(t['kind'], t) for t in traces])
         traces = [kinds_dict[k] for k in kinds]
 
-        self.assertEqual(traces[0]['kind'], 'transaction.django')
+        self.assertEqual(traces[0]['kind'], 'transaction')
         self.assertEqual(traces[0]['signature'], 'transaction')
         self.assertEqual(traces[0]['transaction'],
                          'tests.contrib.django.testapp.views.render_jinja2_template')
Transaction traces should have 'transaction' kind.
py
diff --git a/distutils_ui/build_ui.py b/distutils_ui/build_ui.py
index <HASH>..<HASH> 100644
--- a/distutils_ui/build_ui.py
+++ b/distutils_ui/build_ui.py
@@ -78,7 +78,7 @@ class build_tool(Command):
         self.chdir = self.parse_arg('chdir', self.chdir or '')
         # subprocess environment: run tools in posix locale
         self.env = dict(os.environ)
-        self.env['LANG'] = b'C'
+        self.env['LANG'] = 'C'
 
     def run(self):
         self.debug('%s.run', self.__class__.__name__)
Fix build_ui on Windows. The environment passed to subprocesses on Windows may only contain str, not bytes.
py
diff --git a/d1_client_cli/src/d1_client_cli/dataone.py b/d1_client_cli/src/d1_client_cli/dataone.py
index <HASH>..<HASH> 100755
--- a/d1_client_cli/src/d1_client_cli/dataone.py
+++ b/d1_client_cli/src/d1_client_cli/dataone.py
@@ -976,7 +976,10 @@ class CLI(cmd.Cmd):
   def do_allow(self, line):
     '''allow <subject> [access level]
-    Allow access to subject
+    Allow access to subject.
+
+    Access level is one of:
+      'read', 'write', 'changePermission', 'execute', 'replicate'
     '''
     try:
       subject, permission = self._split_args(line, 1, 1)
@@ -1258,7 +1261,7 @@ class CLI(cmd.Cmd):
   def do_log(self, line):
-    '''log
+    '''log [path]
     Retrieve event log
     '''
     try:
Update help text (allow, log).
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -22,7 +22,7 @@ setup(
         'cryptography>=1.2.3',
         'jsontokens>=0.0.2',
         'keylib>=0.0.5',
-        'blockstack-zones>=0.14.0',
+        'blockstack-zones>=0.14.1',
         'warlock>=1.3.0'
     ],
     classifiers=[
require <I> of blockstack zones
py
diff --git a/aerofiles/seeyou/writer.py b/aerofiles/seeyou/writer.py
index <HASH>..<HASH> 100644
--- a/aerofiles/seeyou/writer.py
+++ b/aerofiles/seeyou/writer.py
@@ -7,7 +7,7 @@ class Writer:
     """
     A writer for SeeYou CUP files. Supports waypoints and tasks::
 
-        with open('competition.cup', 'w') as fp:
+        with open('competition.cup', 'wb') as fp:
            writer = Writer(fp)
    """
Update docs

Needs wb for Python 3
py
diff --git a/plexapi/settings.py b/plexapi/settings.py
index <HASH>..<HASH> 100644
--- a/plexapi/settings.py
+++ b/plexapi/settings.py
@@ -22,7 +22,7 @@ class Settings(PlexObject):
     def __getattr__(self, attr):
         if attr.startswith('_'):
             return self.__dict__[attr]
-        return self.get(attr)
+        return self.get(attr).value
 
     def __setattr__(self, attr, value):
         if not attr.startswith('_'):
plex.settings.__getattr__ should return the value (not the object)
py
diff --git a/test/acid.py b/test/acid.py
index <HASH>..<HASH> 100755
--- a/test/acid.py
+++ b/test/acid.py
@@ -50,7 +50,13 @@ def _detect_encoding(filename):
         # Python 3
         with open(filename, 'rb') as input_file:
             import tokenize
-            return tokenize.detect_encoding(input_file.readline)[0]
+            encoding = tokenize.detect_encoding(input_file.readline)[0]
+            try:
+                with open(filename, encoding=encoding) as input_file:
+                    input_file.read()
+                return encoding
+            except UnicodeDecodeError:
+                return 'latin-1'
     except AttributeError:
         return 'utf-8'
Add fallback in _detect_encoding()
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -30,6 +30,8 @@ setup(
         'Framework :: Django',
         'Framework :: Django :: 1.8',
         'Framework :: Django :: 1.9',
+        'Framework :: Django :: 1.10',
+        'Framework :: Django :: 1.11',
         'Intended Audience :: Developers',
         'License :: OSI Approved :: MIT License',
         'Operating System :: OS Independent',
Adjusted supported Django versions classifiers.
py
diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index <HASH>..<HASH> 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -882,11 +882,16 @@ class ShellCaseCommonTestsMixIn(object):
             stdout=subprocess.PIPE,
             stderr=subprocess.PIPE,
             close_fds=True,
-            cwd=os.path.abspath(os.path.dirname(salt.__file__))
+            cwd=CODE_DIR
         )
-        out, _ = process.communicate()
+        out, err = process.communicate()
         if not out:
-            self.skipTest('Failed to get the output of \'git describe\'')
+            self.skipTest(
+                'Failed to get the output of \'git describe\'. '
+                'Error: {0!r}'.format(
+                    err
+                )
+            )
         parsed_version = '{0}'.format(out.strip().lstrip('v'))
         parsed_version_info = tuple([
Show why git describe failed on the tests.
py
diff --git a/pyuploadcare/api_resources.py b/pyuploadcare/api_resources.py
index <HASH>..<HASH> 100644
--- a/pyuploadcare/api_resources.py
+++ b/pyuploadcare/api_resources.py
@@ -502,7 +502,7 @@ class FileGroup(object):
         for index, file_ in enumerate(files):
             if isinstance(file_, File):
                 file_index = 'files[{index}]'.format(index=index)
-                data[file_index] = file_.uuid
+                data[file_index] = unicode(file_)
             else:
                 raise InvalidRequestError(
                     'all items have to be ``File`` instance'
Replace file uuid by file cdn url in FileGroup.create
py
diff --git a/spyder/plugins/editor/lsp/client.py b/spyder/plugins/editor/lsp/client.py
index <HASH>..<HASH> 100644
--- a/spyder/plugins/editor/lsp/client.py
+++ b/spyder/plugins/editor/lsp/client.py
@@ -124,7 +124,8 @@ class LSPClient(QObject, LSPMethodProviderMixIn):
                      ' '.join(self.server_args)))
         creation_flags = 0
         if WINDOWS:
-            creation_flags = subprocess.CREATE_NEW_PROCESS_GROUP
+            creation_flags = (subprocess.CREATE_NEW_PROCESS_GROUP
+                              | 0x08000000)  # CREATE_NO_WINDOW
         self.lsp_server = subprocess.Popen(
             self.server_args,
             stdout=self.lsp_server_log,
LSP: Append flag instead of replacing
py
diff --git a/cherrypy/wsgiserver/__init__.py b/cherrypy/wsgiserver/__init__.py
index <HASH>..<HASH> 100644
--- a/cherrypy/wsgiserver/__init__.py
+++ b/cherrypy/wsgiserver/__init__.py
@@ -8,7 +8,6 @@ __all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer',
 
 import sys
 if sys.version_info < (3, 0):
-    from wsgiserver2 import *
+    from .wsgiserver2 import *
 else:
-    # Le sigh. Boo for backward-incompatible syntax.
-    exec('from .wsgiserver3 import *')
+    from .wsgiserver3 import *
Use uniform syntax for wsgiserver imports
py
diff --git a/tinymce/widgets.py b/tinymce/widgets.py
index <HASH>..<HASH> 100644
--- a/tinymce/widgets.py
+++ b/tinymce/widgets.py
@@ -81,7 +81,7 @@ class TinyMCE(forms.Textarea):
         if tinymce.settings.USE_COMPRESSOR:
             js = [reverse('tinymce-compressor')]
         else:
-            js = tinymce.settings.JS_URL
+            js = [tinymce.settings.JS_URL]
         if tinymce.settings.USE_FILEBROWSER:
             js.append(reverse('tinymce-filebrowser'))
         return forms.Media(js=js)
Fixed widget media property that was not always a list.
py
diff --git a/usb/__init__.py b/usb/__init__.py
index <HASH>..<HASH> 100644
--- a/usb/__init__.py
+++ b/usb/__init__.py
@@ -48,7 +48,7 @@ version_info = (1, 0, 0, 'b2')
 
 __version__ = '%d.%d.%d%s' % version_info
 
-__all__ = ['legacy', 'core', 'backend', 'util', 'libloader']
+__all__ = ['legacy', 'control', 'core', 'backend', 'util', 'libloader']
 
 def _setup_log():
Add 'control' module to the list exported by usb package.

'control' was not in the __all__ field for the 'usb' package. This means that

    from usb import *
    control.XXX

would fail to find the module 'control'.
py
diff --git a/tests/test_carddb.py b/tests/test_carddb.py
index <HASH>..<HASH> 100644
--- a/tests/test_carddb.py
+++ b/tests/test_carddb.py
@@ -36,3 +36,13 @@ def test_play_scripts():
             assert card.type == CardType.HERO_POWER
         elif card.scripts.play:
             assert card.type not in (CardType.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT)
+
+
+def test_card_docstrings():
+    for card in CARDS.values():
+        c = utils.fireplace.utils.get_script_definition(card.id)
+        name = c.__doc__
+        if name is not None:
+            if name.endswith(")"):
+                continue
+            assert name == card.name
Add a test for using card docstrings as names
py
diff --git a/dwave/cloud/cli.py b/dwave/cloud/cli.py
index <HASH>..<HASH> 100644
--- a/dwave/cloud/cli.py
+++ b/dwave/cloud/cli.py
@@ -72,7 +72,7 @@ def cli():
 
 @cli.command()
 @click.option('--config-file', default=None, help='Config file path',
-              type=click.Path(exists=True, dir_okay=False))
+              type=click.Path(exists=False, dir_okay=False))
 @click.option('--profile', default=None,
               help='Connection profile name (config section name)')
 @click.option('--list-config-files', is_flag=True, callback=list_config_files,
CLI: configure accepts non-existing file as arg
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -25,7 +25,7 @@ setup(name='gbdxtools',
       install_requires=['requests==2.9.1',
                         'boto==2.39.0',
                         'gbdx-auth==0.1.2',
-                        'Pillow>=3',
+                        'Pillow==3.1.1',
                        'pygeoif==0.6',
                        'sympy==1.0',
                        'ndg-httpsclient==0.4.0'],
reverted Pillow requirement to <I> until we decide what to do with it
py
diff --git a/commands/reverse.py b/commands/reverse.py
index <HASH>..<HASH> 100755
--- a/commands/reverse.py
+++ b/commands/reverse.py
@@ -15,7 +15,17 @@
 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 
 
+args = ['logs', 'target']
+
 def cmd(send, msg, args):
+    log = args['log'][args['target']][:-1]
     if not msg:
+        send(log[-1][::-1])
         return
+    if "\\" in msg:
+        user = msg[1:]
+        for line in reversed(log[-50:]):
+            if re.search(r"<@?\+?" + user + ">", line):
+                send(re.search(r"<.*> (.*)", msg).groups(1)[::-1])
+                return
     send(msg[::-1])
\ No newline at end of file
allow reversing other people's messages
py
diff --git a/phoebe/parameters/datasets.py b/phoebe/parameters/datasets.py
index <HASH>..<HASH> 100644
--- a/phoebe/parameters/datasets.py
+++ b/phoebe/parameters/datasets.py
@@ -525,7 +525,7 @@ def parse_rv(filenames,columns=None,components=None,full_output=False,**kwargs):
 
     Extra keyword arguments are passed to output RVDataSets or pbdeps,
     wherever they exist and override the contents of the comment lines in the
-    phot file. For example, compare these two files:
+    phot file. For example, compare these two files::
 
     # passband = JOHNSON.V
     # atm = kurucz
added missing packages to setup.py thanks to input from our first active user Michel Hillen (thanks!)
py
diff --git a/opengem/hazard/opensha.py b/opengem/hazard/opensha.py
index <HASH>..<HASH> 100644
--- a/opengem/hazard/opensha.py
+++ b/opengem/hazard/opensha.py
@@ -94,15 +94,14 @@ class MonteCarloMixin: # pylint: disable=W0232
                 task.wait()
                 if task.status != 'SUCCESS':
                     raise Exception(task.result)
+                # if self.params['OUTPUT_GMF_FILES']
             for j in range(0, realizations):
                 gmf_id = "%s!%s" % (i, j)
                 gmf_key = "%s!GMF!%s" % (self.key, gmf_id)
-                print "LOADING from %s" % gmf_key
-                if kvs.get_client(binary=False).get(gmf_key):
-                    gmf = kvs.get_value_json_decoded(gmf_key)
-                    if gmf:
-                        self.write_gmf_file(gmf)
+                gmf = kvs.get_value_json_decoded(gmf_key)
+                if gmf:
+                    self.write_gmf_file(gmf)
 
     def write_gmf_file(self, gmfs):
         """Generate a GeoTiff file for each GMF"""
Removed sanity check on key existence (redundant).
py
diff --git a/custodian/vasp/validators.py b/custodian/vasp/validators.py
index <HASH>..<HASH> 100644
--- a/custodian/vasp/validators.py
+++ b/custodian/vasp/validators.py
@@ -81,9 +81,4 @@ def check_broken_chgcar(chgcar):
         # a decent bunch of the values are negative
         return True
 
-    diff = chgcar_data[:-1, :-1, :-1] - chgcar_data[1:, 1:, 1:]
-    if diff.max()/(chgcar_data.max() - chgcar_data.min()) > 0.95:
-        # Some single diagonal finite difference is more than 95% of the entire range
-        return True
-
     return False
The second check incorrectly flags the centers of some heavy atoms
py
diff --git a/gwpy/plotter/html.py b/gwpy/plotter/html.py
index <HASH>..<HASH> 100644
--- a/gwpy/plotter/html.py
+++ b/gwpy/plotter/html.py
@@ -219,7 +219,7 @@ def _map(data, axes, filename, href='#', mapname='points', popup=None,
         transform = axes.transData
 
     # get 2-d pixels
-    pixels = transform.transform(data[:, :2]).astype(int)
+    pixels = numpy.round(transform.transform(data[:, :2])).astype(int)
 
     # get figure size
     dpi = fig.dpi
@@ -257,5 +257,4 @@ def _map(data, axes, filename, href='#', mapname='points', popup=None,
     if standalone:
         return (HTML_HEADER.format(title=title, jquery=jquery) + hmap +
                 HTML_FOOTER)
-    else:
-        return hmap
+    return hmap
plotter.html: two trivial improvements
py
diff --git a/mordred/config.py b/mordred/config.py
index <HASH>..<HASH> 100644
--- a/mordred/config.py
+++ b/mordred/config.py
@@ -384,7 +384,7 @@ class Config():
     def get_global_data_sources(cls):
         """ Data sources than are collected and enriched globally """
 
-        return ['bugzilla', 'bugzillarest', 'gerrit']
+        return ['bugzilla', 'bugzillarest', 'confluence', 'gerrit', 'jenkins', 'jira']
 
     def get_data_sources(self):
         data_sources = []
[config] Add additional data sources that are loaded globally: confluence, jenkins, jira
py
diff --git a/renku/cli/_exc.py b/renku/cli/_exc.py
index <HASH>..<HASH> 100644
--- a/renku/cli/_exc.py
+++ b/renku/cli/_exc.py
@@ -40,6 +40,9 @@ class IssueFromTraceback(click.Group):
         try:
             return super().main(*args, **kwargs)
         except Exception:
+            if not (sys.stdin.isatty() and sys.stdout.isatty()):
+                raise
+
             value = click.prompt(
                 click.style(
                     'Ahhhhhhhh! You have found a bug. 🐞\n\n',
cli: do not prompt with redirected streams
py
diff --git a/kafka/conn.py b/kafka/conn.py
index <HASH>..<HASH> 100644
--- a/kafka/conn.py
+++ b/kafka/conn.py
@@ -106,14 +106,15 @@ class KafkaConnection(local):
 
     def send(self, request_id, payload):
         "Send a request to Kafka"
+        log.debug("About to send %d bytes to Kafka, request %d" % (len(payload), request_id))
+
+        # Make sure we have a connection
+        if self._dirty or not self._sock:
+            self.reinit()
+
         try:
-            if self._dirty or not self._sock:
-                self.reinit()
-            log.debug("Sending payload %s" % (payload,))
-            sent = self._sock.sendall(payload)
-            if sent is not None:
-                self._raise_connection_error()
+            self._sock.sendall(payload)
         except socket.error:
             log.exception('Unable to send payload to Kafka')
             self._raise_connection_error()
socket.sendall should always raise an exception on error; remove extra return val check in KafkaConnection.send()
py
diff --git a/cherrypy/__init__.py b/cherrypy/__init__.py
index <HASH>..<HASH> 100644
--- a/cherrypy/__init__.py
+++ b/cherrypy/__init__.py
@@ -97,11 +97,6 @@ from cherrypy import _cplogging
 
 class _GlobalLogManager(_cplogging.LogManager):
 
-    def __init__(self, appid=None):
-        _cplogging.LogManager.__init__(self, appid)
-        # Set a default screen handler on the global log.
-        self.screen = True
-
     def __call__(self, *args, **kwargs):
         try:
             log = request.app.log
@@ -117,7 +112,9 @@ class _GlobalLogManager(_cplogging.LogManager):
 
 log = _GlobalLogManager()
-log.error_file = _os.path.join(_os.getcwd(), _localdir, "error.log")
+# Set a default screen handler on the global log.
+log.screen = True
+log.error_file = ''
 
 # Using an access file makes CP about 10% slower. Leave off by default.
 log.access_file = ''
Rather than default to an error log file, we'll just default to screen. This circumvents any permission problems in the cherrypy install directory.
py
diff --git a/pyls/workspace.py b/pyls/workspace.py
index <HASH>..<HASH> 100644
--- a/pyls/workspace.py
+++ b/pyls/workspace.py
@@ -103,10 +103,10 @@ class Workspace(object):
     def _create_document(self, doc_uri, source=None, version=None):
         path = uris.to_fs_path(doc_uri)
         return Document(
-            doc_uri, source=source, version=version,
+            doc_uri, self, source=source, version=version,
             extra_sys_path=self.source_roots(path),
             rope_project_builder=self._rope_project_builder,
-            config=self._config, workspace=self,
+            config=self._config,
         )
Make workspace a required arg when calling Document in Workspace class (#<I>)
py
diff --git a/chart/models.py b/chart/models.py
index <HASH>..<HASH> 100644
--- a/chart/models.py
+++ b/chart/models.py
@@ -11,6 +11,9 @@ class Chart(ModelBase):
 
     def get_absolute_url(self):
         return reverse('chart_object_detail', kwargs={'slug': self.slug})
+
+    def __unicode__(self):
+        return self.title
 
 class ChartEntry(ModelBase):
     chart = models.ForeignKey(
modified Chart model to include unicode method
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,8 +8,8 @@ setup(
     author="Jeffrey Gelens",
    author_email="[email protected]",
    license="BSD",
-    url="https://bitbucket.org/Jeffrey/gevent-socketio",
-    download_url="https://bitbucket.org/Jeffrey/gevent-socketio",
+    url="https://github.com/abourget/gevent-socketio",
+    download_url="https://github.com/abourget/gevent-socketio",
    install_requires=("gevent-websocket", "anyjson"),
    setup_requires = ("versiontools >= 1.7",),
    packages=find_packages(exclude=["examples","tests"]),
point to the abourget repo instead of bitbucket
py
diff --git a/internetarchive/utils.py b/internetarchive/utils.py
index <HASH>..<HASH> 100644
--- a/internetarchive/utils.py
+++ b/internetarchive/utils.py
@@ -235,21 +235,21 @@ def recursive_file_count(files, item=None, checksum=False):
             is_dir = False
         if is_dir:
             for x, _ in iter_directory(f):
-                lmd5 = get_md5(open(x, 'rb'))
+                if checksum is True:
+                    lmd5 = get_md5(open(x, 'rb'))
+                    if lmd5 in md5s:
+                        continue
+                total_files += 1
+        else:
+            if checksum is True:
+                try:
+                    lmd5 = get_md5(open(f, 'rb'))
+                except TypeError:
+                    # Support file-like objects.
+                    lmd5 = get_md5(f)
                 if lmd5 in md5s:
                     continue
-                else:
-                    total_files += 1
-        else:
-            try:
-                lmd5 = get_md5(open(f, 'rb'))
-            except TypeError:
-                # Support file-like objects.
-                lmd5 = get_md5(f)
-            if lmd5 in md5s:
-                continue
-            else:
-                total_files += 1
+            total_files += 1
     return total_files
Fixed bug where md5 was being calculated for every upload.
py
diff --git a/fut/core.py b/fut/core.py
index <HASH>..<HASH> 100644
--- a/fut/core.py
+++ b/fut/core.py
@@ -604,11 +604,12 @@ class Core(object):
         :params resource_id: Resource id.
         """
         # TODO: add referer to headers (futweb)
-        return self.players[baseId(resource_id)]
-        '''
-        url = '{0}{1}.json'.format(self.urls['card_info'], baseId(resource_id))
-        return requests.get(url, timeout=self.timeout).json()
-        '''
+        base_id = baseId(resource_id)
+        if base_id in self.players:
+            return self.players[base_id]
+        else:  # not a player?
+            url = '{0}{1}.json'.format(self.urls['card_info'], base_id)
+            return requests.get(url, timeout=self.timeout).json()
 
     def searchDefinition(self, asset_id, start=0, count=35):
         """Return variations of the given asset id, e.g. IF cards.
core: fix cardInfo for non-players
py
diff --git a/basil/TL/SiTcp.py b/basil/TL/SiTcp.py
index <HASH>..<HASH> 100644
--- a/basil/TL/SiTcp.py
+++ b/basil/TL/SiTcp.py
@@ -10,7 +10,9 @@ import socket
 import select
 import struct
 from array import array
-from threading import Thread, Lock
+from threading import Thread
+from threading import RLock as Lock
+
 from basil.TL.SiTransferLayer import SiTransferLayer
 
@@ -139,8 +141,8 @@ class SiTcp(SiTransferLayer):
         return size
 
     def _get_tcp_data(self, size):
-        ret_size = min((size, self._get_tcp_data_size()))
         self._tcp_lock.acquire()
+        ret_size = min((size, self._get_tcp_data_size()))
         ret = self._tcp_read_buff[:ret_size]
         self._tcp_read_buff = self._tcp_read_buff[ret_size:]
         self._tcp_lock.release()
ENH: use RLock to prevent from filling up buffer in between reads
py
diff --git a/tests/test_api.py b/tests/test_api.py
index <HASH>..<HASH> 100644
--- a/tests/test_api.py
+++ b/tests/test_api.py
@@ -139,6 +139,16 @@ def test_parse_curl_with_another_binary_data():
     )""")
 
 
+def test_parse_curl_with_insecure_flag():
+    uncurl.parse("""curl 'https://pypi.python.org/pypi/uncurl' --insecure""").should.equal(
+        """requests.get("https://pypi.python.org/pypi/uncurl",
+    headers={},
+    cookies={},
+    verify=False
+)"""
+    )
+
+
 if __name__ == '__main__':
     test_basic_get()
     test_colon_header()
@@ -150,5 +160,6 @@ if __name__ == '__main__':
     test_post_with_string_data()
     test_parse_curl_with_binary_data()
     test_parse_curl_with_another_binary_data()
+    test_parse_curl_with_insecure_flag()
Added unit test for fix #<I>
py
diff --git a/mysql/toolkit/components/execute.py b/mysql/toolkit/components/execute.py
index <HASH>..<HASH> 100644
--- a/mysql/toolkit/components/execute.py
+++ b/mysql/toolkit/components/execute.py
@@ -40,7 +40,7 @@ class SQLScript:
         self.split_func, self.split_char = split_func, split_char
 
         # Dump failed SQL commands boolean
-        self.dump_fails = dump_fails
+        self._dump_fails = dump_fails
 
     @property
     def commands(self):
@@ -62,7 +62,7 @@ class SQLScript:
         setattr(self, 'fetched_commands', cleaned_commands)
         return cleaned_commands
 
-    def execute_commands(self, commands=None, skip_drops=True):
+    def execute(self, commands=None, skip_drops=True):
         """
         Sequentially execute a list of SQL commands.
 
@@ -95,11 +95,11 @@ class SQLScript:
             print('\t' + str(success), 'successful commands')
 
         # Dump failed commands to text files
-        if len(fail) > 1 and self.dump_fails:
-            self.dump_failed_commands(fail)
+        if len(fail) > 1 and self._dump_fails:
+            self.dump_fails(fail)
         return fail, success
 
-    def dump_failed_commands(self, fails):
+    def dump_fails(self, fails):
         """Dump failed commands to .sql files in the fails directory."""
         dump_commands(fails, self.sql_script)
Refactored execute_commands method to execute and dump_failed_commands to dump_fails
py
diff --git a/tests/integration/test_screenshots.py b/tests/integration/test_screenshots.py
index <HASH>..<HASH> 100644
--- a/tests/integration/test_screenshots.py
+++ b/tests/integration/test_screenshots.py
@@ -1,6 +1,7 @@
 import os
 
 import pytest
+import shutil
 from selenium import webdriver
 from selenium.common.exceptions import TimeoutException
 from webdriver_manager.chrome import ChromeDriverManager
@@ -26,6 +27,7 @@ def setup_module(m):
 
 def teardown_module(m):
     driver().quit()
+    shutil.rmtree(config.reports_folder)
 
 
 def get_default_screenshot_folder():
test_screenshots: delete report folder after tests
py
diff --git a/src/tonicdnscli/tests/test_converter.py b/src/tonicdnscli/tests/test_converter.py
index <HASH>..<HASH> 100644
--- a/src/tonicdnscli/tests/test_converter.py
+++ b/src/tonicdnscli/tests/test_converter.py
@@ -91,12 +91,13 @@ mx.example.org A 10.10.11.10 3600\n""",
         self.assertListEqual(self.list4, o2.records)
 
     def test_genData(self):
-        o = JSONConvert('example.org')
-        o.genData(True)
-        self.assertDictEqual({'records': []}, o.dict_records)
-        o.genData(False)
-        self.assertDictEqual({'records': [], 'name': 'example.org'},
-                             o.dict_records)
+        o1 = JSONConvert('example.org')
+        o1.genData(True)
+        self.assertListEqual([{'records': []}], o1.dict_records)
+        o2 = JSONConvert('example.org')
+        o2.genData(False)
+        self.assertListEqual([{'records': [], 'name': 'example.org'}],
+                             o2.dict_records)
 
     def test_separateInputFile(self):
         import os.path
Update test for separate-processing bug fix
py
diff --git a/openquake/db/models.py b/openquake/db/models.py
index <HASH>..<HASH> 100644
--- a/openquake/db/models.py
+++ b/openquake/db/models.py
@@ -877,7 +877,7 @@ class RiskCalculation(djm.Model):
     # General parameters:
     #####################
 
-    # A description for this config proifile which is meaningful to a user.
+    # A description for this config profile which is meaningful to a user.
     description = djm.TextField(default='', blank=True)
 
     # The timeout is stored in seconds and is 1 hour by default.
db/models: Fixed a typo in a comment.
py
diff --git a/tests/report_tests/plugin_tests/networking.py b/tests/report_tests/plugin_tests/networking.py
index <HASH>..<HASH> 100644
--- a/tests/report_tests/plugin_tests/networking.py
+++ b/tests/report_tests/plugin_tests/networking.py
@@ -6,6 +6,7 @@
 #
 # See the LICENSE file in the source distribution for further information.
 
+import os
 
 from sos_tests import StageOneReportTest
 
@@ -29,3 +30,9 @@ class NetworkingPluginTest(StageOneReportTest):
     def test_forbidden_globs_skipped(self):
         self.assertFileGlobNotInArchive('/proc/net/rpc/*/channel')
         self.assertFileGlobNotInArchive('/proc/net/rpc/*/flush')
+
+    def test_netdevs_properly_iterated(self):
+        for dev in os.listdir('/sys/class/net'):
+            self.assertFileGlobInArchive(
+                "sos_commands/networking/ethtool_*_%s" % dev
+            )
[tests] Update networking test in suite

Adds a new test to the networking plugin test to ensure we iterate correctly over network devices.
py
diff --git a/beprof/curve.py b/beprof/curve.py
index <HASH>..<HASH> 100644
--- a/beprof/curve.py
+++ b/beprof/curve.py
@@ -45,7 +45,7 @@ class Curve(np.ndarray):
     def __new__(cls, input_array, **meta):
         # print("Here I am in Curve.__new__, cls:", cls)
         obj = np.asarray(input_array).view(cls)
-        # print("MOVING ON")
+        # print("MOVING ON Curve.__new__")
         if meta is None:
             obj.metadata = {}
         else:
Cosmetic changes for testing purposes

Nothing important has changed in this file
py
diff --git a/holoviews/core/io.py b/holoviews/core/io.py
index <HASH>..<HASH> 100644
--- a/holoviews/core/io.py
+++ b/holoviews/core/io.py
@@ -565,8 +565,8 @@ class FileArchive(Archive):
     def _single_file_archive(self, export_name, files, root):
         ((_, ext), entry) = files[0]
         (data, info) = entry
-        unique_name = self._unique_name(export_name, ext, root)
-        filename = self._truncate_name(self._normalize_name(*unique_name))
+        (unique_name, ext) = self._unique_name(export_name, ext, root)
+        filename = self._truncate_name(self._normalize_name(unique_name), ext=ext)
         fpath = os.path.join(root, filename)
         with open(fpath, 'w') as f:
             f.write(Exporter.encode(entry))
@@ -598,8 +598,6 @@ class FileArchive(Archive):
         skip = False if force else (not self.unique_name)
         if skip: return (basename, ext)
         ext = '' if ext is None else ext
-
-        ext = '' if ext is None else ext
         if isinstance(existing, str):
             split = [os.path.splitext(el) for el in os.listdir(os.path.abspath(existing))]
Fixed bug in FileArchive._single_file_archive method
py
diff --git a/tools/run_tests/run_xds_tests.py b/tools/run_tests/run_xds_tests.py
index <HASH>..<HASH> 100755
--- a/tools/run_tests/run_xds_tests.py
+++ b/tools/run_tests/run_xds_tests.py
@@ -21,7 +21,6 @@ import json
 import logging
 import os
 import random
-import shlex
 import socket
 import subprocess
 import sys
@@ -1800,9 +1799,8 @@ try:
                 rpcs_to_send=rpcs_to_send,
                 metadata_to_send=metadata_to_send)
             logger.debug('running client: %s', client_cmd_formatted)
-            client_cmd = shlex.split(client_cmd_formatted)
             try:
-                client_process = subprocess.Popen(client_cmd,
+                client_process = subprocess.Popen(['/bin/bash', '-i', '-c', client_cmd_formatted],
                                                   env=client_env,
                                                   stderr=subprocess.STDOUT,
                                                   stdout=test_log_file)
In run_xds_tests.py, run clients in bash
py
diff --git a/ryu/services/protocols/bgp/base.py b/ryu/services/protocols/bgp/base.py
index <HASH>..<HASH> 100644
--- a/ryu/services/protocols/bgp/base.py
+++ b/ryu/services/protocols/bgp/base.py
@@ -257,21 +257,24 @@ class Activity(object):
         """
         hub.sleep(seconds)
 
-    def _stop_child_activities(self):
+    def _stop_child_activities(self, name=None):
         """Stop all child activities spawn by this activity.
         """
         # Makes a list copy of items() to avoid dictionary size changed
         # during iteration
         for child_name, child in list(self._child_activity_map.items()):
+            if name is not None and name != child_name:
+                continue
             LOG.debug('%s: Stopping child activity %s ', self.name, child_name)
             if child.started:
                 child.stop()
+            self._child_activity_map.pop(child_name, None)
 
     def _stop_child_threads(self, name=None):
         """Stops all threads spawn by this activity.
         """
         for thread_name, thread in list(self._child_thread_map.items()):
-            if not name or thread_name is name:
+            if name is not None and thread_name is name:
                 LOG.debug('%s: Stopping child thread %s', self.name, thread_name)
                 thread.kill()
BGPSpeaker/base: Stop child activity by name

This patch enables Activity base to stop the child activity by name.
py
diff --git a/gandi/cli/modules/iaas.py b/gandi/cli/modules/iaas.py
index <HASH>..<HASH> 100644
--- a/gandi/cli/modules/iaas.py
+++ b/gandi/cli/modules/iaas.py
@@ -276,11 +276,9 @@ class Iaas(GandiModule, SshkeyHelper):
     @classmethod
     def from_hostname(cls, hostname):
         """Retrieve virtual machine id associated to a hostname."""
-        result = cls.list({'hostname': hostname})
+        result = cls.list({'hostname': str(hostname)})
         if result:
             return result[0]['id']
-        # No "else" clause - if no VM was found matching this hostname, we
-        # just fall through the default (which effectively returns None).
 
     @classmethod
     def usable_id(cls, id):
Fixes vm post creation bug

Introduced with pull request c<I>d<I>d9fc<I>a8bf<I>a<I>b<I>f1d7fe9b

We must cast api parameter to string otherwise it fails and exits the process.
py
diff --git a/mimesis/providers/internet.py b/mimesis/providers/internet.py
index <HASH>..<HASH> 100644
--- a/mimesis/providers/internet.py
+++ b/mimesis/providers/internet.py
@@ -214,7 +214,6 @@ class Internet(BaseProvider):
         :Example:
             AMQP
         """
-        # TODO: Refactoring.
         layer = layer.lower()
         try:
             protocol = self.random.choice(NETWORK_PROTOCOLS[layer])
Removed forgotten TODO. (#<I>)
py
diff --git a/superset/viz.py b/superset/viz.py
index <HASH>..<HASH> 100644
--- a/superset/viz.py
+++ b/superset/viz.py
@@ -1648,6 +1648,7 @@ class DistributionBarViz(BaseViz):
             raise QueryObjectValidationError(_("Pick at least one metric"))
         if not fd.get("groupby"):
             raise QueryObjectValidationError(_("Pick at least one field for [Series]"))
+        d["orderby"] = [(d["metrics"][0], False)]
         return d
 
     def get_data(self, df: pd.DataFrame) -> VizData:
add order by for bar charts (#<I>)
py
diff --git a/uncompyle6/main.py b/uncompyle6/main.py
index <HASH>..<HASH> 100644
--- a/uncompyle6/main.py
+++ b/uncompyle6/main.py
@@ -43,7 +43,7 @@ def _get_outstream(outfile):
         mode = 'wb'
     else:
         mode = 'w'
-    return open(outfile, mode)
+    return open(outfile, mode, encoding="utf-8")
 
 def decompile(
     bytecode_version, co, out=None, showasm=None, showast=False,
support utf-8 chars
py
diff --git a/py17track/__version__.py b/py17track/__version__.py
index <HASH>..<HASH> 100644
--- a/py17track/__version__.py
+++ b/py17track/__version__.py
@@ -1,2 +1,2 @@
 """Define a version constant."""
-__version__ = "2.2.2"
+__version__ = "2.2.3"
Bumped version to <I>
py
diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py
index <HASH>..<HASH> 100644
--- a/salt/modules/win_pkg.py
+++ b/salt/modules/win_pkg.py
@@ -160,7 +160,7 @@ def list_upgrades(refresh=True, saltenv='base', **kwargs):  # pylint: disable=W0
     ret = {}
     for name, data in six.iteritems(get_repo_data(saltenv).get('repo', {})):
         if version(name):
-            latest = latest_version(name, refresh=False)
+            latest = latest_version(name, refresh=False, saltenv=saltenv)
             if latest:
                 ret[name] = latest
     return ret
Pass saltenv to call to latest_version in list_upgrades function
py
diff --git a/brothon/utils/file_tailer.py b/brothon/utils/file_tailer.py
index <HASH>..<HASH> 100644
--- a/brothon/utils/file_tailer.py
+++ b/brothon/utils/file_tailer.py
@@ -24,12 +24,13 @@ class FileTailer(object):
         self._full_read = full_read
         self._tail = tail
 
-    def readlines(self):
+    def readlines(self, offset=0):
         """Open the file for reading and yield lines as they are added"""
        try:
            with open(self._filepath) as fp:
                # For full read go through existing lines in file
                if self._full_read:
+                    fp.seek(offset)
                    for row in fp.readlines():
                        yield row
adding an offset arg to readlines
py
diff --git a/datajoint/declare.py b/datajoint/declare.py
index <HASH>..<HASH> 100644
--- a/datajoint/declare.py
+++ b/datajoint/declare.py
@@ -76,7 +76,7 @@ def compile_foreign_key(line, context, attributes, primary_key, attr_sql, foreig
 
     # match new attributes and referenced attributes and create foreign keys
     missing_attrs = [attr for attr in ref.primary_key if attr not in attributes] or (
-        len(result.new_attrs) == len(ref.primary_key) == 1 and ref.primary_key)
+        ref.primary_key if len(result.new_attrs) == len(ref.primary_key) == 1 else [])
     new_attrs = result.new_attrs or missing_attrs
     ref_attrs = result.ref_attrs or missing_attrs
     if len(new_attrs) != len(ref_attrs):
fixed the foreign key declaration bug described in issue #<I>
py
diff --git a/tools/cluster.py b/tools/cluster.py
index <HASH>..<HASH> 100755
--- a/tools/cluster.py
+++ b/tools/cluster.py
@@ -12,6 +12,7 @@
 # simultaneous invocations by the same user may have unpredictable results.
 
 import sys
+from time import sleep
 from subprocess import Popen, PIPE
 
 # constants
@@ -21,7 +22,7 @@ LOCKFILE = "/dev/shm/reserved"
 CHECK_UMASK = "umask -S | sed 's/.*,//g' | grep -v w"
 
 def usage():
-    print sys.argv[0], "[release|reserve] [machine...]"
+    print sys.argv[0], "[release|reserve|wait] [machine...]"
     sys.exit()
 
 def system(args):
@@ -71,5 +72,16 @@ elif (option == "reserve"):
             print machine, "could not be reserved"
             delete_lockfiles(reserved)
             sys.exit(-1)
+elif (option == "wait"):
+    while 1:
+        for machine in machines:
+            if 0 == system(["ssh", machine, "test -O " + LOCKFILE]):
+                # one of the machines we're waiting for is unavailable
+                break # out of the for loop
+        else: # for...else, not if...else!! yes this indentation is correct
+            # we made it all the way through the for loop
+            # no machines are reserved
+            break # out of the while loop
+        sleep(5)
 else:
     usage()
Add option to wait for machines to become available. This option will allow hudson to start a process on an executor which runs until some other process completes, without having to have general-purpose process monitoring in place. (really written by nshectman)
py
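The `for...else` construct the patch leans on trips up many readers: the `else` suite runs only when the loop finishes without hitting `break`. Rewritten as a self-contained sketch (the function name is hypothetical; the lockfile path and ssh check mirror the script):

```python
import subprocess
from time import sleep

LOCKFILE = '/dev/shm/reserved'

def wait_for(machines):
    """Block until no machine in the list holds our lockfile."""
    while True:
        for machine in machines:
            if subprocess.call(['ssh', machine, 'test -O ' + LOCKFILE]) == 0:
                break        # this machine is still reserved; stop checking
        else:
            return           # for loop completed: every machine is free
        sleep(5)             # something was reserved; poll again shortly
```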
diff --git a/goose/crawler.py b/goose/crawler.py index <HASH>..<HASH> 100644 --- a/goose/crawler.py +++ b/goose/crawler.py @@ -102,7 +102,7 @@ class Crawler(object): article.cleaned_text = output_formatter.get_formatted_text(article) # cleanup tmp file - self.relase_resources(article) + self.release_resources(article) return article @@ -138,7 +138,7 @@ class Crawler(object): def get_extractor(self): return StandardContentExtractor(self.config) - def relase_resources(self, article): + def release_resources(self, article): path = os.path.join(self.config.local_storage_path, '%s_*' % article.link_hash) for fname in glob.glob(path): try:
renamed relase_resources to release_resources
py
diff --git a/poetry/installation/pip_installer.py b/poetry/installation/pip_installer.py index <HASH>..<HASH> 100644 --- a/poetry/installation/pip_installer.py +++ b/poetry/installation/pip_installer.py @@ -105,12 +105,6 @@ class PipInstaller(BaseInstaller): self.install(target, update=True) def remove(self, package): - # If we have a VCS package, remove its source directory - if package.source_type == "git": - src_dir = self._env.path / "src" / package.name - if src_dir.exists(): - safe_rmtree(str(src_dir)) - try: self.run("uninstall", package.name, "-y") except CalledProcessError as e: @@ -119,6 +113,12 @@ class PipInstaller(BaseInstaller): raise + # If we have a VCS package, remove its source directory + if package.source_type == "git": + src_dir = self._env.path / "src" / package.name + if src_dir.exists(): + safe_rmtree(str(src_dir)) + def run(self, *args, **kwargs): # type: (...) -> str return self._env.run_pip(*args, **kwargs)
pip: ensure vcs source is removed after package removal Relates-to: #<I>
py
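The ordering matters because the `except` branch can re-raise: deleting the checkout first would strand a package whose uninstall then failed. A simplified stand-in for `PipInstaller.remove`, using plain `subprocess` and `shutil` instead of Poetry's internal helpers:

```python
import shutil
import subprocess

def remove_package(name, src_dir):
    """Uninstall first; drop the VCS checkout only once pip has succeeded."""
    result = subprocess.run(['pip', 'uninstall', name, '-y'],
                            capture_output=True, text=True)
    if result.returncode != 0 and 'not installed' not in result.stderr:
        raise RuntimeError(result.stderr)   # bail before touching the tree
    # Reached only on success (or when the package was already absent).
    if src_dir.exists():
        shutil.rmtree(str(src_dir))
```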
diff --git a/docs/gl_objects/projects.py b/docs/gl_objects/projects.py index <HASH>..<HASH> 100644 --- a/docs/gl_objects/projects.py +++ b/docs/gl_objects/projects.py @@ -229,7 +229,9 @@ f.content = 'new content' f.save(branch_name='master', commit_message='Update testfile') # or for binary data -f.content = base64.b64encode(open('image.png').read()) +# Note: decode() is required with python 3 for data serialization. You can omit +# it with python 2 +f.content = base64.b64encode(open('image.png').read()).decode() f.save(branch_name='master', commit_message='Update testfile', encoding='base64') # end files update
docs: add a note for python <I> for file content update The data passed to the JSON serializer must be a string with python 3. Document this in the examples. Fix #<I>
py
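Two Python 3 details hide in that one line: `b64encode()` returns `bytes`, which the JSON serializer rejects, and the source file itself should be opened in binary mode. A quick check (assumes an `image.png` alongside the script):

```python
import base64

with open('image.png', 'rb') as fh:    # 'rb': binary-safe read on Python 3
    content = base64.b64encode(fh.read()).decode()

print(type(content))                   # <class 'str'>
```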
diff --git a/zipline/assets/assets.py b/zipline/assets/assets.py index <HASH>..<HASH> 100644 --- a/zipline/assets/assets.py +++ b/zipline/assets/assets.py @@ -831,6 +831,12 @@ class AssetFinder(object): """ Lookup a list of equities by symbol. + Equivalent to:: + + [finder.lookup_symbol(s, as_of, fuzzy) for s in symbols] + + but potentially faster because repeated lookups are memoized. + Parameters ---------- symbols : sequence[str]
DOC: Add note on lookup_symbols.
py
diff --git a/ndio/ramon/enums.py b/ndio/ramon/enums.py index <HASH>..<HASH> 100644 --- a/ndio/ramon/enums.py +++ b/ndio/ramon/enums.py @@ -24,7 +24,7 @@ eRAMONAnnoStatus = enum("DEFAULT", PROCESSED=2, IGNORED=3) -DEFAULT_ID = -1 +DEFAULT_ID = 0 DEFAULT_CONFIDENCE = 0 DEFAULT_DYNAMIC_METADATA = {} DEFAULT_STATUS = eRAMONAnnoStatus.DEFAULT
default id for RAMONs is 0 (unset)
py
diff --git a/pandas/tests/test_format.py b/pandas/tests/test_format.py index <HASH>..<HASH> 100644 --- a/pandas/tests/test_format.py +++ b/pandas/tests/test_format.py @@ -1,5 +1,5 @@ -from __future__ import print_function # -*- coding: utf-8 -*- +from __future__ import print_function import re from pandas.compat import range, zip, lrange, StringIO, PY3, lzip, u
BUG: move the UTF-8 encoding line to the first line (before the from __future__ import)
py
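The constraint being fixed comes from PEP 263: the `coding:` declaration is only honored on line 1 or 2 of the file, while `from __future__` imports must precede other statements but may follow comments. The valid header order, in a minimal file:

```python
# -*- coding: utf-8 -*-
# PEP 263 requires the encoding comment above to sit on line 1 or 2.
from __future__ import print_function

print(u'héllo')   # non-ASCII literal now decodes correctly on Python 2
```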
diff --git a/src/concurrent_log_handler/__init__.py b/src/concurrent_log_handler/__init__.py index <HASH>..<HASH> 100644 --- a/src/concurrent_log_handler/__init__.py +++ b/src/concurrent_log_handler/__init__.py @@ -173,6 +173,7 @@ class ConcurrentRotatingFileHandler(BaseRotatingHandler): self._debug = debug self.use_gzip = True if gzip and use_gzip else False + self.gzip_buffer = 8096 # Absolute file name handling done by FileHandler since Python 2.5 super(ConcurrentRotatingFileHandler, self).__init__( @@ -436,10 +437,15 @@ class ConcurrentRotatingFileHandler(BaseRotatingHandler): self._console_log("#no gzip available", stack=False) return out_filename = input_filename + ".gz" - # TODO: we probably need to buffer large files here to avoid memory problems + with open(input_filename, "rb") as input_fh: with gzip.open(out_filename, "wb") as gzip_fh: - gzip_fh.write(input_fh.read()) + while True: + data = input_fh.read(self.gzip_buffer) + if not data: + break + gzip_fh.write(data) + os.remove(input_filename) self._console_log("#gzipped: %s" % (out_filename,), stack=False) return
Use buffering when gzipping files; fixes issues with gzip failing or high memory usage.
py
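The fixed-size read loop keeps peak memory at roughly one buffer regardless of log size, where the old `input_fh.read()` pulled the whole file into RAM. The core of the change as a standalone helper:

```python
import gzip
import os

GZIP_BUFFER = 8096   # bytes per chunk, matching the handler's gzip_buffer

def gzip_file(input_filename):
    """Compress input_filename to input_filename + '.gz' in small chunks."""
    out_filename = input_filename + '.gz'
    with open(input_filename, 'rb') as input_fh:
        with gzip.open(out_filename, 'wb') as gzip_fh:
            while True:
                data = input_fh.read(GZIP_BUFFER)
                if not data:          # EOF reached
                    break
                gzip_fh.write(data)
    os.remove(input_filename)         # original replaced by the .gz file
```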
diff --git a/jss/jamf_software_server.py b/jss/jamf_software_server.py index <HASH>..<HASH> 100644 --- a/jss/jamf_software_server.py +++ b/jss/jamf_software_server.py @@ -62,7 +62,8 @@ class JSS(object): # pylint: disable=too-many-arguments def __init__( self, jss_prefs=None, url=None, user=None, password=None, - repo_prefs=None, ssl_verify=True, verbose=False, jss_migrated=False): + repo_prefs=None, ssl_verify=True, verbose=False, jss_migrated=False, + **kwargs): """Setup a JSS for making API requests. Provide either a JSSPrefs object OR specify url, user, and @@ -110,6 +111,9 @@ class JSS(object): ssl_verify = jss_prefs.verify suppress_warnings = jss_prefs.suppress_warnings + # TODO: This method currently accepts '**kwargs' to soften + # the deprecation of the urllib warnings removal. + self.base_url = url self.session = CurlAdapter()
Handle deprecated args by ignoring them in JSS.__init__
py
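Accepting and discarding `**kwargs` is a common way to keep old call sites working after arguments are removed; the patch drops them silently per its TODO, though one might also emit a warning. A hypothetical sketch of that variant (class and parameter names are illustrative, not the real JSS API):

```python
import warnings

class Client(object):
    """Hypothetical client that tolerates removed keyword arguments."""
    def __init__(self, url=None, **kwargs):
        for name in kwargs:           # anything unrecognized is deprecated
            warnings.warn('%r is deprecated and ignored' % name,
                          DeprecationWarning)
        self.base_url = url

client = Client(url='https://example.com', suppress_warnings=True)
```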
diff --git a/docs/example/example.py b/docs/example/example.py index <HASH>..<HASH> 100644 --- a/docs/example/example.py +++ b/docs/example/example.py @@ -28,13 +28,13 @@ config = parser.gccxml_configuration_t( decls = parser.parse(['example.hpp'], config) global_ns = declarations.get_global_namespace(decls) -# Get object that describes unittests namespace +# Get object that describes unittests namespace unittests = global_ns.namespace('unittests') print('"unittests" declarations: \n') declarations.print_declarations(unittests) -# Print all base and derived class names +# Print all base and derived class names for class_ in unittests.classes(): print('class "%s" hierarchy information:' % class_.name) print('\tbase classes : ', repr([ @@ -45,7 +45,7 @@ for class_ in unittests.classes(): # Pygccxml has very powerfull query api: -# Select multiple declarations +# Select multiple declarations run_functions = unittests.member_functions('run') print('the namespace contains %d "run" member functions' % len(run_functions)) print('they are: ')
Fix trailing whitespace in example.py
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -40,8 +40,7 @@ setup( packages=['vcspull', 'vcspull.testsuite', 'vcspull.repo', - 'vcspull._vendor', - 'vcspull._vendor.colorama'], + ], scripts=['pkg/vcspull.bash', 'pkg/vcspull.zsh', 'pkg/vcspull.tcsh'], entry_points=dict(console_scripts=['vcspull=vcspull:cli.main']), classifiers=[
remove vcspull._vendor from package
py
diff --git a/msrest/serialization.py b/msrest/serialization.py index <HASH>..<HASH> 100644 --- a/msrest/serialization.py +++ b/msrest/serialization.py @@ -453,7 +453,7 @@ class Serializer(object): raise ValidationError("required", "body", True) # Just in case this is a dict - if data_type.strip('[]{}') in self.dependencies: + if data_type.strip('[]{}') in self.dependencies: deserializer = Deserializer(self.dependencies) deserializer.key_extractors = [ rest_key_case_insensitive_extractor, @@ -1234,6 +1234,8 @@ class Deserializer(object): :rtype: Enum :raises: DeserializationError if string is not valid enum value. """ + if isinstance(data, enum_obj): + return data if isinstance(data, int): # Workaround. We might consider remove it in the future. # https://github.com/Azure/azure-rest-api-specs/issues/141
Enums are an Autorest subclass
py
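The added guard makes deserialization idempotent: `isinstance` is true for members of an `Enum` subclass, so values that are already enums pass straight through instead of being re-coerced. Demonstrated with a toy enum:

```python
from enum import Enum

class Color(Enum):
    RED = 'red'

def deserialize_enum(data, enum_obj):
    if isinstance(data, enum_obj):    # already a member: return unchanged
        return data
    return enum_obj(data)             # coerce from the raw value

assert deserialize_enum('red', Color) is Color.RED
assert deserialize_enum(Color.RED, Color) is Color.RED   # idempotent
```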
diff --git a/entity/__init__.py b/entity/__init__.py index <HASH>..<HASH> 100644 --- a/entity/__init__.py +++ b/entity/__init__.py @@ -1,7 +1,4 @@ # flake8: noqa -from .config import EntityConfig, entity_registry, register_entity from .version import __version__ -from .signal_handlers import turn_on_syncing, turn_off_syncing -from .sync import sync_entities default_app_config = 'entity.apps.EntityConfig'
do not import modules that import models in __init__
py
diff --git a/flask_restful_hal/resource.py b/flask_restful_hal/resource.py index <HASH>..<HASH> 100644 --- a/flask_restful_hal/resource.py +++ b/flask_restful_hal/resource.py @@ -174,7 +174,8 @@ class Resource(rest.Resource): # type: ignore for key in ('embed', 'include_links'): del kwargs[key] merged_kwargs = merge_args_and_kwargs(*args, **kwargs) - resource = dict(cls.data(**merged_kwargs)) + data = cls.data(**merged_kwargs) + resource = dict(data if data is not None else ()) if include_links: add_links(resource, **merged_kwargs) if embed > 0:
Handle `data` routines that can return `None`
py
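The fallback works because `dict()` accepts any iterable of key/value pairs and an empty tuple produces an empty dict, so a `None`-returning `data()` yields an empty resource instead of a `TypeError`. In miniature:

```python
def to_resource(data):
    return dict(data if data is not None else ())

print(to_resource(None))          # {}
print(to_resource([('id', 1)]))   # {'id': 1}
```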