diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/ofp/v0x01/common/queue.py b/ofp/v0x01/common/queue.py index <HASH>..<HASH> 100644 --- a/ofp/v0x01/common/queue.py +++ b/ofp/v0x01/common/queue.py @@ -12,7 +12,7 @@ from foundation import basic_types # Enums -class QueueProperties (enum.Enum): +class QueueProperties(enum.Enum): """ Describes the queue properties. @@ -27,7 +27,7 @@ class QueueProperties (enum.Enum): # Classes -class PacketQueue (base.GenericStruct): +class PacketQueue(base.GenericStruct): """ This class describes a queue. @@ -49,7 +49,7 @@ class PacketQueue (base.GenericStruct): self.properties = properties -class QueuePropHeader (base.GenericStruct): +class QueuePropHeader(base.GenericStruct): """ This class describes the header of each queue property. @@ -68,7 +68,7 @@ class QueuePropHeader (base.GenericStruct): self.pad = pad -class QueuePropMinRate (base.GenericStruct): +class QueuePropMinRate(base.GenericStruct): """ This class defines the minimum-rate type queue.
This commit Fixes #<I> implementing all common messages
py
diff --git a/tests/test_nearley/test_nearley.py b/tests/test_nearley/test_nearley.py index <HASH>..<HASH> 100644 --- a/tests/test_nearley/test_nearley.py +++ b/tests/test_nearley/test_nearley.py @@ -19,7 +19,12 @@ if not os.path.exists(BUILTIN_PATH): logger.warning("Nearley not included. Skipping Nearley tests! (use git submodule to add)") raise ImportError("Skipping Nearley tests!") -import js2py # Ensures that js2py exists, to avoid failing tests +try: + import js2py # Ensures that js2py exists, to avoid failing tests +except RuntimeError as e: + if "python version" in str(e): + raise ImportError("js2py does not support this python version") + raise class TestNearley(unittest.TestCase):
Skip test_nearley.py when js2py doesn't recognize the python version.
py
diff --git a/nougat/config.py b/nougat/config.py index <HASH>..<HASH> 100644 --- a/nougat/config.py +++ b/nougat/config.py @@ -20,4 +20,6 @@ class Config(dict): :param object_name: the object you wanna load :return: """ - # TODO: load from object + for key in dir(object_name): + if key.isupper(): + self[key] = getattr(object_name, key)
feature: complete load_from_object
py
diff --git a/nashvegas/management/commands/upgradedb.py b/nashvegas/management/commands/upgradedb.py index <HASH>..<HASH> 100644 --- a/nashvegas/management/commands/upgradedb.py +++ b/nashvegas/management/commands/upgradedb.py @@ -318,7 +318,8 @@ class Command(BaseCommand): try: for migration in migrations: - created_models |= self._execute_migration(db, migration, show_traceback=show_traceback) + migration_path = os.path.join(self.path, db, migration) + created_models |= self._execute_migration(db, migration_path, show_traceback=show_traceback) sys.stdout.write("Emitting post sync signal.\n") emit_post_sync_signal(
Correct --execute to handle path correctly
py
diff --git a/src/python/ttfautohint/cli.py b/src/python/ttfautohint/cli.py index <HASH>..<HASH> 100644 --- a/src/python/ttfautohint/cli.py +++ b/src/python/ttfautohint/cli.py @@ -37,13 +37,6 @@ if IN-FILE is missing also, standard input and output are used. The new hints are based on FreeType's auto-hinter. This program is a simple front-end to the `ttfautohint' library. - -Long options can be given with one or two dashes, -and with and without equal sign between option and argument. -This means that the following forms are acceptable: -`-foo=bar', `--foo=bar', `-foo bar', `--foo bar'. - -Mandatory arguments to long options are mandatory for short options too. """ EPILOG = """\ @@ -168,8 +161,6 @@ Key letters `l', `r', `n', `p', `t', `w', `x', and `y' have the verbose aliases `left', `right', `nodir', `point', `touch', `width', `xshift', and `yshift', respectively. -A GUI version of this program is called `ttfautohintGUI'. - Report bugs to: [email protected] ttfautohint home page: <http://www.freetype.org/ttfautohint>
cli: remove --help text paragraphs that don't apply here
py
diff --git a/landsat/downloader.py b/landsat/downloader.py index <HASH>..<HASH> 100644 --- a/landsat/downloader.py +++ b/landsat/downloader.py @@ -43,12 +43,14 @@ class Downloader(VerbosityMixin): if isinstance(bands, list): # Create a folder to download the specific bands into path = check_create_folder(join(self.download_dir, scene)) - for band in bands: - self.amazon_s3(scene, band, path) + try: + for band in bands: + self.amazon_s3(scene, band, path) + except RemoteFileDoesntExist: + self.google_storage(scene, self.download_dir) else: raise Exception('Expected bands list') - else: - self.google_storage(scene, self.download_dir) + self.google_storage(scene, self.download_dir) return True
if image is not found on amazon download from Google
py
diff --git a/kdcount/correlate.py b/kdcount/correlate.py index <HASH>..<HASH> 100644 --- a/kdcount/correlate.py +++ b/kdcount/correlate.py @@ -701,7 +701,7 @@ class paircount_worker(object): self.size = len(self.p) self.pts_only = isinstance(self.data[0], points) and isinstance(self.data[1], points) - self.dofast = self.usefast and type(self.bins) is RBinning and self.pts_only + self.dofast = self.usefast and isinstance(self.bins, RBinning) and self.pts_only # initialize arrays to hold total sum1 and sum2 # grabbing the desired shapes from the binning instance
use fast paircounting for all R binning schemes.
py
diff --git a/tests/test_gitpuller.py b/tests/test_gitpuller.py index <HASH>..<HASH> 100644 --- a/tests/test_gitpuller.py +++ b/tests/test_gitpuller.py @@ -286,6 +286,32 @@ def test_merging_simple(): assert puller.read_file('README.md') == '2' +def test_merging_after_commit(): + """ + Test that merging works even after we make a commit locally + """ + with Remote() as remote, Pusher(remote) as pusher: + pusher.push_file('README.md', '1') + + with Puller(remote) as puller: + assert puller.read_file('README.md') == pusher.read_file('README.md') == '1' + + puller.write_file('README.md', '2') + puller.git('commit', '-am', 'Local change') + + puller.pull_all() + + assert puller.read_file('README.md') == '2' + assert pusher.read_file('README.md') == '1' + + pusher.push_file('README.md', '3') + puller.pull_all() + + # Check if there is a merge commit + parent_commits = puller.git('show', '-s', '--format="%P"', 'HEAD').strip().split(' ') + assert(len(parent_commits) == 2) + + def test_untracked_puller(): """ Test that untracked files in puller are preserved when pulling
Test that merge works after making a manual commit I was worried that making a manual commit (i.e. outside of nbgitpuller) might cause a problem with subsequent syncs, so I added a test for it.
py
diff --git a/setuptools/command/build_ext.py b/setuptools/command/build_ext.py index <HASH>..<HASH> 100644 --- a/setuptools/command/build_ext.py +++ b/setuptools/command/build_ext.py @@ -197,14 +197,13 @@ class build_ext(_build_ext): def get_outputs(self): outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize + fn_exts = ['.py', '.pyc'] + if self.get_finalized_command('build_py').optimize: + fn_exts.append('.pyo') ns_ext = (ext for ext in self.extensions if ext._needs_stub) for ext in ns_ext: base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base + '.py') - outputs.append(base + '.pyc') - if optimize: - outputs.append(base + '.pyo') + outputs.extend(base + fnext for fnext in fn_exts) return outputs def write_stub(self, output_dir, ext, compile=False):
Rewrite function to use extend and a generator expression.
py
diff --git a/clkhash/bloomfilter.py b/clkhash/bloomfilter.py index <HASH>..<HASH> 100644 --- a/clkhash/bloomfilter.py +++ b/clkhash/bloomfilter.py @@ -209,7 +209,7 @@ def blake_encode_ngrams(ngrams, # type: Iterable[str] :return: bitarray of length l with the bits set which correspond to the encoding of the ngrams """ - key, = keys # Unpack. + key = keys[0] # We only need the first key log_l = int(math.log(l, 2)) if not 2 ** log_l == l:
fix for issue #<I>. Unpacking fails if more than one element in tuple
py
diff --git a/HARK/ConsumptionSaving/ConsIndShockModel.py b/HARK/ConsumptionSaving/ConsIndShockModel.py index <HASH>..<HASH> 100644 --- a/HARK/ConsumptionSaving/ConsIndShockModel.py +++ b/HARK/ConsumptionSaving/ConsIndShockModel.py @@ -1783,9 +1783,7 @@ class IndShockConsumerType(PerfForesightConsumerType): def updateIncomeProcess(self): ''' - Updates this agent's income process based on his own attributes. The - function that generates the discrete income process can be swapped out - for a different process. + Updates this agent's income process based on his own attributes. Parameters ----------
Remove incorrect statement in updateIncomeProcess @mnwhite this seems to be wrong, right? I don't see any `self.constructIncomeProcess`-ish statements, so it appears that the mean one log-normal equiprobably version is hard-coded. Correct? I think we should remove the statement unless I missed something in the code, and if it's a *planned* feature, let's just open an issue.
py
diff --git a/timepiece/forms.py b/timepiece/forms.py index <HASH>..<HASH> 100644 --- a/timepiece/forms.py +++ b/timepiece/forms.py @@ -81,17 +81,11 @@ class AddUpdateEntryForm(forms.ModelForm): except: raise forms.ValidationError('Please enter an end time.') - if end > datetime.now(): - raise forms.ValidationError('You cannot clock out in the future!') - if start >= end: raise forms.ValidationError('The entry must start before it ends!') return end - #def clean(self): - # print self.cleaned_data - class DateForm(forms.Form): from_date = forms.DateField(label="From", required=False)
allow users to clock out in the future
py
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -21,6 +21,7 @@ with open("../cihai/__about__.py") as fp: extensions = [ 'sphinx.ext.autodoc', + 'sphinxcontrib.napoleon', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'releases',
add sphinx-napoleon to sphinx conf, for numpy docstring support
py
diff --git a/elasticsearch_dsl/document.py b/elasticsearch_dsl/document.py index <HASH>..<HASH> 100644 --- a/elasticsearch_dsl/document.py +++ b/elasticsearch_dsl/document.py @@ -120,8 +120,6 @@ class DocType(ObjectBase): def from_es(cls, hit): # don't modify in place meta = hit.copy() - # make sure highlighting information ends up in meta - meta['_highlight'] = meta.pop('highlight', {}) doc = meta.pop('_source') return cls(meta=meta, **doc)
'highlight' was already on meta
py
diff --git a/sox/version.py b/sox/version.py index <HASH>..<HASH> 100644 --- a/sox/version.py +++ b/sox/version.py @@ -2,5 +2,5 @@ # -*- coding: utf-8 -*- """Version info""" -short_version = '1.3' -version = '1.3.7' +short_version = '1.4' +version = '1.4.0a0'
Update version.py bumping to <I> alpha 0
py
diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index <HASH>..<HASH> 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -878,6 +878,9 @@ class SaltMessageClient(object): if self._connecting_future.done(): self._connecting_future = self.connect() yield self._connecting_future + except TypeError: + # This is an invalid transport + raise SaltClientError except Exception as e: log.error('Exception parsing response', exc_info=True) for future in six.itervalues(self.send_future_map):
if the transport is wrong we should throw a clienterrror
py
diff --git a/max7219/led.py b/max7219/led.py index <HASH>..<HASH> 100644 --- a/max7219/led.py +++ b/max7219/led.py @@ -159,6 +159,34 @@ class device(object): if redraw: self.flush() + def rotate_left(self, redraw=True): + """ + Scrolls the buffer one column to the left. The data that scrolls off + the left side re-appears at the right-most position. If redraw + is not suppled, or left set to True, will force a redraw of _all_ buffer + items + """ + t = self._buffer[-1] + for i in range((self.NUM_DIGITS * self._cascaded) - 1, 0, -1): + self._buffer[i] = self._buffer[i - 1] + self._buffer[0] = t + if redraw: + self.flush() + + def rotate_right(self, redraw=True): + """ + Scrolls the buffer one column to the right. The data that scrolls off + the right side re-appears at the left-most position. If redraw + is not suppled, or left set to True, will force a redraw of _all_ buffer + items + """ + t = self._buffer[0] + for i in range(0, (self.NUM_DIGITS * self._cascaded) - 1, 1): + self._buffer[i] = self._buffer[i + 1] + self._buffer[-1] = t + if redraw: + self.flush() + def scroll_left(self, redraw=True): """ Scrolls the buffer one column to the left. Any data that scrolls off
Update led.py Adding rotate_right/left member functions to the device base class.
py
diff --git a/pysubs2/ssafile.py b/pysubs2/ssafile.py index <HASH>..<HASH> 100644 --- a/pysubs2/ssafile.py +++ b/pysubs2/ssafile.py @@ -159,7 +159,8 @@ class SSAFile(MutableSequence): # The file might be a pipe and we need to read it twice, # so just buffer everything. text = fp.read() - format_ = autodetect_format(text) + fragment = text[:10000] + format_ = autodetect_format(fragment) fp = io.StringIO(text) impl = get_format_class(format_)
Autodetect format using only initial part of file
py
diff --git a/build/build.py b/build/build.py index <HASH>..<HASH> 100755 --- a/build/build.py +++ b/build/build.py @@ -873,6 +873,7 @@ class Release(): def createNightly(self, jarOrWar): self.createExecutable(jarOrWar) + removeIfExists(os.path.join(distDir, "VERSION")) self.writeHashes() self.sign() self.upload(nightliesPath)
[build] Minot build-script change.
py
diff --git a/pipes/configs/outputs.py b/pipes/configs/outputs.py index <HASH>..<HASH> 100644 --- a/pipes/configs/outputs.py +++ b/pipes/configs/outputs.py @@ -50,10 +50,11 @@ def convert_ini(config_dict): def write_variables(app_configs=None, out_file='', git_short=''): - """Append _application.json_ configurations to _out_file_ and .exports. + """Append _application.json_ configs to _out_file_, .exports, and .json. Variables are written in INI style, e.g. UPPER_CASE=value. The .exports file - contains 'export' prepended to each line for easy sourcing. + contains 'export' prepended to each line for easy sourcing. The .json file + is a minified representation of the combined configurations. Args: app_configs (dict): Environment configurations from _application.json_
docs: Update docstring
py
diff --git a/plugins/function_strings.py b/plugins/function_strings.py index <HASH>..<HASH> 100644 --- a/plugins/function_strings.py +++ b/plugins/function_strings.py @@ -22,15 +22,24 @@ class FunctionStrings(idaapi.plugin_t): idaapi.msg("String References in {}:0x{:08X}\n".format(function.name, function.startEA)) idaapi.msg("From To String\n") - for line in function.lines: - for ea in line.drefs_from: - if idaapi.isCode(idaapi.getFlags(ea)): - continue - string = sark.read_ascii_string(ea, max_length=100) - if not sark.core.is_string_printable(string): - continue - - idaapi.msg("0x{:08X} 0x{:08X} {}\n".format(line.ea, ea, repr(string))) + for xref in function.xrefs_from: + if xref.type.is_code: + continue + + string_type = idc.GetStringType(xref.to) + + if string_type is None: + continue + + string = idc.GetString(xref.to, strtype=string_type) + + if not string: + continue + + # Trim the string for easier display + string = string[:100] + + idaapi.msg("0x{:08X} 0x{:08X} {}\n".format(xref.frm, xref.to, repr(string))) def PLUGIN_ENTRY():
Updated the function strings plugin to new API.
py
diff --git a/src/rituals/acts/documentation.py b/src/rituals/acts/documentation.py index <HASH>..<HASH> 100644 --- a/src/rituals/acts/documentation.py +++ b/src/rituals/acts/documentation.py @@ -316,8 +316,9 @@ class DocsUploader(object): except subprocess.CalledProcessError: git_path = '' else: - git_path = git_path.decode('ascii').strip().replace('http://', '').replace('https://', '') - git_path = re.search((r'[^:]+:' if 'git@' in git_path else r'[^/]+/') + r'(.+)', git_path) + git_path = git_path.decode('ascii').strip() + git_path = git_path.replace('http://', '').replace('https://', '').replace('ssh://', '') + git_path = re.search(r'[^:/]+?[:/](.+)', git_path) git_path = git_path.group(1).replace('.git', '') if git_path else '' url = None with self._zipped(docs_base) as handle:
docs.upload: fix for ssh:// git urls
py
diff --git a/tests/ethpm/test_package_init_from_registry_uri.py b/tests/ethpm/test_package_init_from_registry_uri.py index <HASH>..<HASH> 100644 --- a/tests/ethpm/test_package_init_from_registry_uri.py +++ b/tests/ethpm/test_package_init_from_registry_uri.py @@ -1,3 +1,4 @@ +import json import os import pytest
Write local ipfs node backend and remove http reqs from tests
py
diff --git a/atlassian/jira.py b/atlassian/jira.py index <HASH>..<HASH> 100644 --- a/atlassian/jira.py +++ b/atlassian/jira.py @@ -21,6 +21,9 @@ class Jira(Atlassian): def project(self, key): return self.get("/rest/api/2/project/{0}".format(key)) + def issue(self, key): + return self.get("/rest/api/2/issue/{0}".format(key)) + def project_leaders(self): for project in self.projects().json(): key = project["key"]
PYAPI-<I> Jira: Get issue details
py
diff --git a/sample/sync-gerrit-review.py b/sample/sync-gerrit-review.py index <HASH>..<HASH> 100755 --- a/sample/sync-gerrit-review.py +++ b/sample/sync-gerrit-review.py @@ -143,8 +143,9 @@ def review_patchset(dci_client, project, version): "%s" % version['id']) return # TODO(Gonéri): also push a message and the URL to see the job. - print("DCI-CS → Gerrit: %s" % status) - _gerrit_review(project["gerrit"]["server"], sha, status) + if status != '0': + print("DCI-CS → Gerrit: %s" % status) + _gerrit_review(project["gerrit"]["server"], sha, status) def _init_conf():
Review only if the status is different from 0. Change-Id: I8effde<I>fad<I>f<I>c<I>c6b6b<I>ec<I>ebcf<I>
py
diff --git a/pyes/es.py b/pyes/es.py index <HASH>..<HASH> 100644 --- a/pyes/es.py +++ b/pyes/es.py @@ -64,9 +64,6 @@ class ElasticSearchModel(DotDict): else: self.update(dict(*args, **kwargs)) - def __repr__(self): - return DotDict.__repr__(self) - def save(self, bulk=False, id=None): """ Save the object and returns id
Removed __repr__ method which was initially broken and in retrospect unnecessary
py
diff --git a/dataset/persistence/database.py b/dataset/persistence/database.py index <HASH>..<HASH> 100644 --- a/dataset/persistence/database.py +++ b/dataset/persistence/database.py @@ -80,7 +80,7 @@ class Database(object): self.local.must_release = True def _release_internal(self): - if not hasattr(self.local, 'must_release') and self.local.must_release: + if getattr(self.local, 'must_release', None): self.lock.release() self.local.must_release = False
Update database.py Inverted logic means _release_internal will always fail on a rollback. Fixed and simplified.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -51,7 +51,7 @@ install_reqs = [ 'pandas>=0.19.0', 'pytz>=2014.10', 'scipy>=0.14.0', - 'statsmodels>=0.5.0' + 'statsmodels>=0.6.1' 'seaborn>=0.7.1', 'pandas-datareader>=0.2', 'empyrical>=0.3.0'
BUG made statsmodels version >= <I>
py
diff --git a/lazysignup/tests.py b/lazysignup/tests.py index <HASH>..<HASH> 100644 --- a/lazysignup/tests.py +++ b/lazysignup/tests.py @@ -191,4 +191,14 @@ class LazyTestCase(TestCase): def testGetConvert(self): self.client.get('/lazy/') response = self.client.get('/convert/') - self.assertEqual(200, response.status_code) \ No newline at end of file + self.assertEqual(200, response.status_code) + + def testConversionKeepsSameUser(self): + self.client.get('/lazy/') + response = self.client.post('/convert/', { + 'username': 'demo', + 'password1': 'password', + 'password2': 'password', + }) + self.assertEqual(1, len(User.objects.all())) + \ No newline at end of file
Extra test to check that the same user is maintained
py
diff --git a/protos/compile.py b/protos/compile.py index <HASH>..<HASH> 100755 --- a/protos/compile.py +++ b/protos/compile.py @@ -53,6 +53,7 @@ def main(): for fname in args.proto_file: # Get the Java class name (name, _) = os.path.splitext(fname) + name = os.path.basename(name) if not name in SERVICES: sys.stderr.write("Error: Service %s is not known. Known services: %s\n" % (name, SERVICES.keys())) return RESULT['INVALID_SERVICE']
Only use the basename for filename comparison
py
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -40,8 +40,7 @@ class Mock(MagicMock): def __getattr__(cls, name): return MagicMock() -MOCK_MODULES = ['numpy', 'scipy', 'pandas', 'lxml', 'beautifulsoup4', - 'sklearn', 'pyjq', 'tika'] +MOCK_MODULES = ['pyjq'] sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) # -- General configuration ------------------------------------------------
Clean up mock imports Only use it for pyjq (the other dependencies are installed without problems on read the docs).
py
diff --git a/mpd/base.py b/mpd/base.py index <HASH>..<HASH> 100644 --- a/mpd/base.py +++ b/mpd/base.py @@ -592,6 +592,7 @@ class MPDClient(MPDClientBase): sock = None try: sock = socket.socket(af, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) sock.settimeout(self.timeout) sock.connect(sa)
Create TCP socket with `TCP_NODELAY`. Nagle's algorithm plays badly with delayed ACK algorithm. Fix it to make interactive clients more responsive.
py
diff --git a/d1_common_python/src/d1common/xmlrunner.py b/d1_common_python/src/d1common/xmlrunner.py index <HASH>..<HASH> 100644 --- a/d1_common_python/src/d1common/xmlrunner.py +++ b/d1_common_python/src/d1common/xmlrunner.py @@ -73,7 +73,7 @@ class _TestInfo(object): text = escape(str(error[1])) stream.write('\n') stream.write(' <%s type="%s">%s\n' \ - % (tagname, str(error[0]), text)) + % (tagname, escape(str(error[0])), text)) tb_stream = StringIO() traceback.print_tb(error[2], None, tb_stream) stream.write(escape(tb_stream.getvalue()))
Error in xmlrunner let through unescaped reserved chars
py
diff --git a/pulsarpy/submit_to_dcc.py b/pulsarpy/submit_to_dcc.py index <HASH>..<HASH> 100644 --- a/pulsarpy/submit_to_dcc.py +++ b/pulsarpy/submit_to_dcc.py @@ -387,6 +387,17 @@ class Submit(): payload["size_range"] = rec.size_range payload["strand_specificity"] = rec.strand_specific payload["source"] = rec.vendor["id"] + + def post_single_cell_sorting(self, rec_id, patch=False) + rec = models.SingleCellSorting(rec_id) + aliases = [] + aliases.append(rec.abbrev_id()) + name = rec.name + if name: + aliases.append(self.clean_name(name)) + payload = {} + payload["aliases"] = aliases + sreqs = rec.sequencing_requests
Started method to post single_cell_sortings
py
diff --git a/cocaine/logging/hanlders.py b/cocaine/logging/hanlders.py index <HASH>..<HASH> 100644 --- a/cocaine/logging/hanlders.py +++ b/cocaine/logging/hanlders.py @@ -45,12 +45,11 @@ VERBOSITY_MAP = { class CocaineHandler(logging.Handler): def __init__(self): - super(CocaineHandler, self).__init__() + logging.Handler.__init__(self) self._log = Logger.instance() self._dispatch = {} for level in VERBOSITY_LEVELS: self._dispatch[level] = functools.partial(self._log.emit, level) - self.devnull = lambda msg: None def emit(self, record):
Backward compatibility with Python <I> logging module. Sad, but true - `logging.Handler` module in Python <I> is not "new-style-object", so `super` keyword leads to the TypeError.
py
diff --git a/salt/grains/core.py b/salt/grains/core.py index <HASH>..<HASH> 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -124,8 +124,10 @@ def os_data(): grains['os'] = 'Solaris' elif grains['kernel'] == 'VMkernel': grains['os'] = 'ESXi' + elif grains['kernel'] == 'Darwin': + grains['os'] = 'MacOS' else: - grains['os'] = kernel + grains['os'] = grains['kernel'] # Load the virtual machine info
Fixed error if we don't have known kernel, added MacOS
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -24,7 +24,7 @@ setup( platforms=['any'], python_requires='>=3.6', classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent',
Switch to the "Stable" development status classifier
py
diff --git a/voluptuous.py b/voluptuous.py index <HASH>..<HASH> 100644 --- a/voluptuous.py +++ b/voluptuous.py @@ -309,7 +309,8 @@ class Schema(object): errors.append(Invalid('extra keys not allowed', key_path)) for key in required_keys: - errors.append(Invalid(key.msg or 'required key not provided', path + [key])) + msg = key.msg if hasattr(key, 'msg') and key.msg else 'required key not provided' + errors.append(Invalid(msg, path + [key])) if errors: raise InvalidList(errors) return out
Fixes regression for dictionary schemas introduced in #<I>.
py
diff --git a/helper.py b/helper.py index <HASH>..<HASH> 100644 --- a/helper.py +++ b/helper.py @@ -71,6 +71,15 @@ class OpenMensaCanteen(): """ Stores that this cateen is closed on $date.""" self._days[self.convertDate(date)] = False + def clearDay(self, date): + try: + del self._days[self.convertDate(date)] + except KeyError: + pass + + def dayCount(self): + return len(self._days) + def toXMLFeed(self): """ Convert this cateen information into string which is a valid OpenMensa v2 xml feed"""
helper: support counting and clearing days
py
diff --git a/ceph_deploy/__init__.py b/ceph_deploy/__init__.py index <HASH>..<HASH> 100644 --- a/ceph_deploy/__init__.py +++ b/ceph_deploy/__init__.py @@ -1,3 +1,3 @@ -__version__ = '1.5.37' +__version__ = '1.5.38'
[RM-<I>] bump to <I>
py
diff --git a/cassandra/__init__.py b/cassandra/__init__.py index <HASH>..<HASH> 100644 --- a/cassandra/__init__.py +++ b/cassandra/__init__.py @@ -23,7 +23,7 @@ class NullHandler(logging.Handler): logging.getLogger('cassandra').addHandler(NullHandler()) -__version_info__ = (3, 1, '0a1', 'post0') +__version_info__ = (3, 1, '0a2') __version__ = '.'.join(map(str, __version_info__))
<I>a2 release version
py
diff --git a/t/napbase.py b/t/napbase.py index <HASH>..<HASH> 100755 --- a/t/napbase.py +++ b/t/napbase.py @@ -266,6 +266,15 @@ class NapTest(unittest.TestCase): def test_prefix_indent(self): """ """ + p1 = self.nap.list_prefix({ 'prefix': '1.3.3.1/32' })[0] + p2 = self.nap.list_prefix({ 'prefix': '1.3.3.0/24' })[0] + p3 = self.nap.list_prefix({ 'prefix': '1.3.0.0/16' })[0] + self.assertEqual(p1['indent'], 2, "Indent calc on add failed") + self.assertEqual(p2['indent'], 1, "Indent calc on add failed") + self.assertEqual(p3['indent'], 0, "Indent calc on add failed") + # remove middle prefix + # FIXME: uncomment when remove_prefix() is implemented + #self.nap.remove_prefix({ 'id': self.prefix_attrs2['id'] })
Simple test for checking indent calc is working This test should be expanded once remove_prefix() is implemented
py
diff --git a/ceam/framework/values.py b/ceam/framework/values.py index <HASH>..<HASH> 100644 --- a/ceam/framework/values.py +++ b/ceam/framework/values.py @@ -84,7 +84,7 @@ def joint_value_post_processor(a): if isinstance(a, NullValue): return pd.Series(1, index=a.index) else: - return 1-a + return 1-(1-a) class Pipeline: """A single mutable value.
joint paf calculation needed one more tweak
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -23,13 +23,14 @@ def readme(): def version(): - return '0.1.5' + return '0.1.6' setuptools.setup( name='qj', description='qj: logging designed for debugging.', long_description=readme(), + long_description_content_type='text/markdown', version=version(), url='https://github.com/iansf/qj', download_url='https://github.com/iansf/qj/archive/%s.tar.gz' % version(), @@ -39,5 +40,5 @@ setuptools.setup( license='Apache 2.0', install_requires=[], test_suite='nose.collector', - tests_require=['nose'], + tests_require=['nose', 'mock'], )
Update setup.py for new pypi requirements. Update version to <I>. All tests pass on <I> and <I>.
py
diff --git a/language_check/__init__.py b/language_check/__init__.py index <HASH>..<HASH> 100644 --- a/language_check/__init__.py +++ b/language_check/__init__.py @@ -42,7 +42,7 @@ from .backports import subprocess from .which import which -__version__ = '1.0' +__version__ = '1.1' __all__ = ['LanguageTool', 'Error', 'get_languages', 'correct', 'get_version',
Increment minor version to <I>
py
diff --git a/netdb/src/netdb.py b/netdb/src/netdb.py index <HASH>..<HASH> 100644 --- a/netdb/src/netdb.py +++ b/netdb/src/netdb.py @@ -139,7 +139,7 @@ class Entry: def _read_time(fd): d = Entry._read(fd, 8) if d: - li = struct.unpack('!Q', d) + li = struct.unpack('!Q', d)[0] return li @staticmethod
have netdb.Entry._read_time not return a tuple
py
diff --git a/mot/lib/utils.py b/mot/lib/utils.py index <HASH>..<HASH> 100644 --- a/mot/lib/utils.py +++ b/mot/lib/utils.py @@ -120,6 +120,9 @@ def convert_data_to_dtype(data, data_type, mot_float_type='float'): dtype = ctype_to_dtype(data_type, mot_float_type) ve = np.zeros(shape[:-1], dtype=dtype) + if data.dtype == dtype: + return data + if len(shape) == 1: for vector_ind in range(shape[0]): ve[0][vector_ind] = data[vector_ind]
Bugfix in convert_data_to_dtype, in the case a vector type was already in the correct dtype
py
diff --git a/Spyrk/spark_cloud.py b/Spyrk/spark_cloud.py index <HASH>..<HASH> 100644 --- a/Spyrk/spark_cloud.py +++ b/Spyrk/spark_cloud.py @@ -13,6 +13,7 @@ # You should have received a copy of the GNU Lesser General Public License # along with Spyrk. If not, see <http://www.gnu.org/licenses/>. +from pprint import pprint from collections import namedtuple from hammock import Hammock # pip install hammock @@ -52,7 +53,7 @@ class SparkCloud(object): self.devices = {} if json_list: - Device = namedtuple('Device', json_list[0].keys() + ['functions', 'variables', 'api']) + Device = namedtuple('Device', list(set(json_list[0].keys() + ['requires_deep_update'])) + ['functions', 'variables', 'api']) _check_error = self._check_error def device_getattr(self, name): if name in self.functions:
Added `requires_deep_update` to Device declaration
py
diff --git a/tests/test_pd2hts.py b/tests/test_pd2hts.py index <HASH>..<HASH> 100644 --- a/tests/test_pd2hts.py +++ b/tests/test_pd2hts.py @@ -375,7 +375,8 @@ class Pd2htsTestCase(TestCase): StringIO(tenmin_test_timeseries), parse_dates=[0], usecols=['date', 'value', 'flags'], index_col=0, header=None, names=('date', 'value', 'flags'), - converters={'flags': lambda x: x}).asfreq('10T') + converters={'flags': lambda x: x}, + dtype={'value': np.float64, 'flags': str}).asfreq('10T') self.reference_ts.timestamp_rounding = '0,0' self.reference_ts.timestamp_offset = '0,0' self.reference_ts.unit = '°C'
Make read_csv more robust in unit tests If the dtype argument is not specified in read_csv, the result is not always the same in all runs. This is probably a pandas bug (<URL>).
py
diff --git a/pyamg/classical/cr.py b/pyamg/classical/cr.py index <HASH>..<HASH> 100644 --- a/pyamg/classical/cr.py +++ b/pyamg/classical/cr.py @@ -67,7 +67,7 @@ def CR(S, method='habituated',maxiter=20): S = binormalize(S) - splitting = numpy.zeros( (S.shape[0],1), dtype=int ) + splitting = numpy.zeros( (S.shape[0],1), dtype='intc' ) # out iterations --------------- for m in range(0,maxiter):
changed int to 'intc'
py
diff --git a/bsdploy/tests/test_bsdploy.py b/bsdploy/tests/test_bsdploy.py index <HASH>..<HASH> 100644 --- a/bsdploy/tests/test_bsdploy.py +++ b/bsdploy/tests/test_bsdploy.py @@ -87,6 +87,15 @@ def test_augment_ezjail_master_fabfile_implicit(ctrl, ployconf, tempdir): assert get_fabfile(ctrl.instances['jailhost']).endswith('jailhost/fabfile.py') +def test_augment_ezjail_jail_fabfile_implicit(ctrl, ployconf, tempdir): + jailhost_fab = tempdir['jailhost-foo/fabfile.py'] + jailhost_fab.fill('') + config = dict(ctrl.instances['foo'].config) + assert config['fabfile'].endswith('jailhost-foo/fabfile.py') + from ploy_fabric import get_fabfile + assert get_fabfile(ctrl.instances['foo']).endswith('jailhost-foo/fabfile.py') + + def test_augment_ezjail_master_fabfile_explicit(ctrl, ployconf, tempdir): jailhost_fab = tempdir['jailhost/fabfile.py'] jailhost_fab.fill('')
test for fabfile-by-convention for jails
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,14 @@ import sys, os.path from setuptools import setup # for f2py extension building -from numpy.distutils.core import Extension, setup as npsetup +try: + from numpy.distutils.core import Extension, setup as npsetup +except: + print('\nyou need to have numpy installed before running setup.py,\n' + 'because we need its Extension functionality to make a\n' + 'Fortran extension for BLS\n') + raise + def readme(): with open('README.md') as f:
setup.py: add friendly error if numpy is not installed
py
diff --git a/salt/modules/solarispkg.py b/salt/modules/solarispkg.py index <HASH>..<HASH> 100644 --- a/salt/modules/solarispkg.py +++ b/salt/modules/solarispkg.py @@ -328,7 +328,7 @@ def remove(name, **kwargs): basedir = kwargs.get('basedir', 'default') # Make tempfile to hold the adminfile contents. - fd, adminfile = salt.utils.mkstemp(prefix="salt-") + fd, adminfile = salt.utils.mkstemp(prefix="salt-", close_fd=False) # Write to file then close it. os.write(fd, 'email={0}\n'.format(email))
Add required `close_fd=False` to keep the `fd` open.
py
diff --git a/client/deis.py b/client/deis.py index <HASH>..<HASH> 100755 --- a/client/deis.py +++ b/client/deis.py @@ -784,7 +784,8 @@ class DeisClient(object): def builds_create(self, args): """ - Creates a new build of an application. + Creates a new build of an application. Imports an <image> and deploys it to Deis + as a new release. Usage: deis builds:create <image> [--app=<app>]
docs(client): elaborate upon builds:create
py
diff --git a/sentinelhub/constants.py b/sentinelhub/constants.py index <HASH>..<HASH> 100644 --- a/sentinelhub/constants.py +++ b/sentinelhub/constants.py @@ -323,7 +323,7 @@ class _BaseCRS(Enum): # Look-up class with possible combinations of UTM zone and direction CRS = _BaseCRS("CRS", dict( - [_get_utm_name_value_pair(zone, direction) for zone, direction in it.product(range(1, 65), _Direction)] + + [_get_utm_name_value_pair(zone, direction) for zone, direction in it.product(range(1, 61), _Direction)] + [('WGS84', '4326'), ('POP_WEB', '3857')] ))
fixed CRS constants - only <I> utm zones
py
diff --git a/pyes/query.py b/pyes/query.py index <HASH>..<HASH> 100644 --- a/pyes/query.py +++ b/pyes/query.py @@ -131,7 +131,7 @@ class Search(EqualityComparableUsingAttributeDictionary): raise InvalidQuery("Invalid query") if self.filter: res['filter'] = self.filter.serialize() - if self.fields is not None: + if self.fields: res['fields'] = self.fields if self.size is not None: res['size'] = self.size
Checked fields is empty and not None.
py
diff --git a/MAVProxy/modules/lib/wxsettings.py b/MAVProxy/modules/lib/wxsettings.py index <HASH>..<HASH> 100644 --- a/MAVProxy/modules/lib/wxsettings.py +++ b/MAVProxy/modules/lib/wxsettings.py @@ -59,7 +59,7 @@ if __name__ == "__main__": print("Changing %s to %s" % (setting.name, setting.value)) # test the settings - import mp_settings, time + from MAVProxy.modules.lib import mp_settings, time from mp_settings import MPSetting settings = mp_settings.MPSettings( [ MPSetting('link', int, 1, tab='TabOne'),
lib: wxsettings conversion to py3
py
diff --git a/synphot/spectrum.py b/synphot/spectrum.py index <HASH>..<HASH> 100644 --- a/synphot/spectrum.py +++ b/synphot/spectrum.py @@ -1737,7 +1737,7 @@ class SpectralElement(BaseUnitlessSpectrum): header, wavelengths, throughput = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=throughput, - meta={'header': header}) + keep_neg=True, meta={'header': header}) @classmethod def from_filter(cls, filtername, **kwargs):
Do not discard neg throughput, to be consistent with ASTROLIB PYSYNPHOT. [skip ci]
py
diff --git a/shinken/modules/graphite_broker.py b/shinken/modules/graphite_broker.py index <HASH>..<HASH> 100644 --- a/shinken/modules/graphite_broker.py +++ b/shinken/modules/graphite_broker.py @@ -88,12 +88,13 @@ class Graphite_broker(BaseModule): continue # Try to get the int/float in it :) - m = re.search("(\d*\.*\d*)", name_value[name]) - if m: - name_value[name] = m.groups(0)[0] - else: - continue -# print "graphite : got in the end :", name, value + for key,value in name_value.items(): + m = re.search("(\d*\.*\d*)", value) + if m: + name_value[key] = m.groups(0)[0] + else: + continue +# print "graphite : got in the end :", name, value for key,value in name_value.items(): res.append((key, value)) return res
Fix : extract int/float part of all metric in perfdata instead of only the first one
py
diff --git a/requests_oauthlib/oauth1_session.py b/requests_oauthlib/oauth1_session.py index <HASH>..<HASH> 100644 --- a/requests_oauthlib/oauth1_session.py +++ b/requests_oauthlib/oauth1_session.py @@ -21,6 +21,13 @@ if sys.version > "3": log = logging.getLogger(__name__) +class TokenRequestDenied(ValueError): + + def __init__(self, message, status_code): + super(TokenRequestDenied, self).__init__(message) + self.status_code = status_code + + class OAuth1Session(requests.Session): """Request signing and convenience methods for the oauth dance. @@ -287,7 +294,7 @@ class OAuth1Session(requests.Session): if r.status_code >= 400: error = "Token request failed with code %s, response was '%s'." - raise ValueError(error % (r.status_code, r.text)) + raise TokenRequestDenied(error % (r.status_code, r.text), r.status_code) log.debug('Decoding token from response "%s"', r.text) try:
Introduce TokenRequestDenied exception for <I>x responses on token fetch. Fix #<I>.
py
diff --git a/bika/lims/browser/worksheet.py b/bika/lims/browser/worksheet.py index <HASH>..<HASH> 100644 --- a/bika/lims/browser/worksheet.py +++ b/bika/lims/browser/worksheet.py @@ -83,7 +83,7 @@ class WorksheetWorkflowAction(WorkflowAction): Retested = form.has_key('retested') and \ form['retested'].has_key(uid), Unit = unit and unit or '', - Remarks = form['Remarks'][0][uid]) + Remarks = form['Remarks'][0].get(uid, '')) # discover which items may be submitted submissable = []
Don't crash if analysis remarks are missing in worksheet views
py
diff --git a/lib/python/voltdbclient.py b/lib/python/voltdbclient.py index <HASH>..<HASH> 100644 --- a/lib/python/voltdbclient.py +++ b/lib/python/voltdbclient.py @@ -1020,13 +1020,14 @@ class VoltProcedure: original_timeout = self.fser.socket.gettimeout() self.fser.socket.settimeout(timeout) try: - res = VoltResponse(self.fser) - except socket.timeout: - res = VoltResponse(None) - res.statusString = "timeout: procedure call took longer than %d seconds" % timeout - except IOError, err: - res = VoltResponse(None) - res.statusString = str(err) + try: + res = VoltResponse(self.fser) + except socket.timeout: + res = VoltResponse(None) + res.statusString = "timeout: procedure call took longer than %d seconds" % timeout + except IOError, err: + res = VoltResponse(None) + res.statusString = str(err) finally: self.fser.socket.settimeout(original_timeout) return response and res or None
ENG-<I>. Python <I> compatible try/except/finally construct.
py
diff --git a/utwist/_utwist.py b/utwist/_utwist.py index <HASH>..<HASH> 100644 --- a/utwist/_utwist.py +++ b/utwist/_utwist.py @@ -145,22 +145,8 @@ def _twisted_test_sync(callee, call_args, call_kwargs, timeout=120): # the value with the string representation provided by `failure`. failure = retval.failure - if failure.type == TypeError: - raise failure.type, failure.getTraceback(), None - elif failure.type == DirtyReactorAggregateError: - # I really don't understand this yet. failure.getTraceback() returns - # a string, but somehow a "\n".join(..) is done on it, leading to - # one charater per line. Does only seem to happen with this specific - # failure type. - raise failure.type, [failure.getTraceback()], None - else: - try: - # Sometimes this fails with a TypeError. Probably has problems - # creating the exception instance. - raise failure.type, failure.getTraceback(), None - except TypeError: - failure.raiseException() - + failure.printTraceback(file=sys.stderr) + failure.raiseException() else: return retval
Improved (or at least changed) failure handling.
py
diff --git a/neurom/io/tests/test_h5_reader.py b/neurom/io/tests/test_h5_reader.py index <HASH>..<HASH> 100644 --- a/neurom/io/tests/test_h5_reader.py +++ b/neurom/io/tests/test_h5_reader.py @@ -90,14 +90,14 @@ def test_read_h5v2_raw_basic(): def test_read_h5v2_raw_basic(): - data, offset, fmt = readers.H5.read( + data, offset, fmt = hdf5.H5.read( os.path.join(H5V2_PATH, 'Neuron_unraveled.h5')) nt.ok_(fmt == 'H5V2') nt.ok_(offset == 0) nt.assert_equal(len(data), 847) nt.assert_equal(np.shape(data), (847, 7)) - data, offset, fmt = readers.H5.read( + data, offset, fmt = hdf5.H5.read( os.path.join(H5V2_PATH, 'Neuron_unraveled.h5'), remove_duplicates=False) nt.ok_(fmt == 'H5V2') nt.ok_(offset == 0)
Lazy loading: merge unraveled h5 reading.
py
diff --git a/molo/core/models.py b/molo/core/models.py index <HASH>..<HASH> 100644 --- a/molo/core/models.py +++ b/molo/core/models.py @@ -157,6 +157,8 @@ ArticlePage.content_panels = [ ] ArticlePage._meta.get_field('first_published_at').editable = True +ArticlePage._meta.get_field('first_published_at').blank = True +ArticlePage._meta.get_field('first_published_at').null = True ArticlePage._meta.get_field('first_published_at').help_text = _( "Please add a date-time in the form YYYY-MM-DD hh:mm.")
make first_published_at nullable
py
diff --git a/build.py b/build.py index <HASH>..<HASH> 100755 --- a/build.py +++ b/build.py @@ -832,7 +832,6 @@ def prepareLocalEntityJar(): shutil.copyfile(os.path.join(buildRoot, linkRelExtensions), os.path.join(filesDir, "link-rel-extensions")) shutil.copyfile(os.path.join(buildRoot, aRelExtensions), os.path.join(filesDir, "a-rel-extensions")) shutil.copyfile(os.path.join(buildRoot, presetsFile), os.path.join(filesDir, "presets")) - shutil.copyfile(os.path.join(buildRoot, presetsFile), os.path.join(filesDir, "presets")) shutil.copyfile(os.path.join(buildRoot, aboutFile), os.path.join(filesDir, "about.html")) shutil.copyfile(os.path.join(buildRoot, stylesheetFile), os.path.join(filesDir, "style.css")) shutil.copyfile(os.path.join(buildRoot, scriptFile), os.path.join(filesDir, "script.js"))
Minor change to remove accidental duplication.
py
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -35,6 +35,15 @@ try: except ImportError: print("no ipython") +# This is a horrible hack that I'm pretty sure is incorrect, but somehow it +# works around (what I think is) a bug in IPython.sphinxext.ipython_directive. +# Without this the tutorial images don't show up -- maybe it tries to write +# them to _build/html/_static, but the _static dir doesn't exist yet? And +# anyway they end up getting delivered in _images/, not _static/? So with this +# they get written into _build/html/, and then copied to +# _build/html/_images/. Sure, why not. +ipython_savefig_dir = "." + # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here.
[rtd] Attempt to fix images in tutorial page
py
diff --git a/src/bio2bel/sources/tfregulons.py b/src/bio2bel/sources/tfregulons.py index <HASH>..<HASH> 100644 --- a/src/bio2bel/sources/tfregulons.py +++ b/src/bio2bel/sources/tfregulons.py @@ -118,13 +118,6 @@ def _add_rows(df: pd.DataFrame, graph: BELGraph) -> None: evidence = 'From TFregulons' for citation in citations: - graph.add_binds( - tf_protein, - target_gene, - citation=citation, - evidence=evidence, - ) - if effect == 1: binds_dna_adder, affects_expression_adder = graph.add_directly_increases, graph.add_increases else:
Update tfregulons output The fact that the proten binds the gene is already taken care of with the second statement
py
diff --git a/passpie/utils.py b/passpie/utils.py index <HASH>..<HASH> 100644 --- a/passpie/utils.py +++ b/passpie/utils.py @@ -1,6 +1,5 @@ from argparse import Namespace from contextlib import contextmanager -from functools import wraps from pkg_resources import get_distribution, DistributionNotFound from random import SystemRandom import errno
Refactor clean imports from passpie/utils
py
diff --git a/hendrix/contrib/async/messaging.py b/hendrix/contrib/async/messaging.py index <HASH>..<HASH> 100644 --- a/hendrix/contrib/async/messaging.py +++ b/hendrix/contrib/async/messaging.py @@ -26,12 +26,12 @@ class RecipientManager(object): """ self.transports[transport.uid] = transport - def send(self, string): # usually a json string... + def send(self, message): # usually a json string... """ sends whatever it is to each transport """ for transport in self.transports.values(): - transport.write(string) + transport.protocol.sendMessage(message) def remove(self, transport): """
It's not a string. It's a message.
py
diff --git a/visidata/main.py b/visidata/main.py index <HASH>..<HASH> 100755 --- a/visidata/main.py +++ b/visidata/main.py @@ -204,7 +204,7 @@ def main_vd(): opts['filetype'] = current_args['filetype'] vs = vd.openSource(p, **opts) - for k, v in current_args.items(): # apply final set of args to sheets specifically if not already set #573 + for k, v in current_args.items(): # apply final set of args to sheets specifically on cli, if not set otherwise #573 if not vs.options.is_set(k): vs.options[k] = v
[dev nfc] improve comment clarity
py
diff --git a/gtts/tests/test_tts.py b/gtts/tests/test_tts.py index <HASH>..<HASH> 100644 --- a/gtts/tests/test_tts.py +++ b/gtts/tests/test_tts.py @@ -26,12 +26,10 @@ class TestLanguages(unittest.TestCase): tts.save(path_slow) # Check if files created is > 2k - # Check if slow speed file is larger than normal speed file filesize = os.path.getsize(path) filesize_slow = os.path.getsize(path_slow) self.assertTrue(filesize > 2000) self.assertTrue(filesize_slow > 2000) - self.assertTrue(filesize_slow > filesize) # Cleanup os.remove(path)
Removed test assert slow > normal file, not guaranteed
py
diff --git a/openquake/baselib/parallel.py b/openquake/baselib/parallel.py index <HASH>..<HASH> 100644 --- a/openquake/baselib/parallel.py +++ b/openquake/baselib/parallel.py @@ -772,8 +772,7 @@ class Starmap(object): self.sent = AccumDict(accum=AccumDict()) # fname -> argname -> nbytes self.monitor.inject = (self.argnames[-1].startswith('mon') or self.argnames[-1].endswith('mon')) - self.receiver = 'tcp://%s:%s' % ( - config.dbserver.listen, config.dbserver.receiver_ports) + self.receiver = 'tcp://0.0.0.0:%s' % config.dbserver.receiver_ports self.monitor.backurl = None # overridden later self.tasks = [] # populated by .submit self.task_no = 0
Fixed an IP issue in the receiver address
py
diff --git a/falafel/core/archives.py b/falafel/core/archives.py index <HASH>..<HASH> 100644 --- a/falafel/core/archives.py +++ b/falafel/core/archives.py @@ -130,7 +130,7 @@ class TarExtractor(Extractor): self._assert_type(path, False) tar_flag = self.TAR_FLAGS.get(self.content_type) self.tmp_dir = tempfile.mkdtemp(dir=extract_dir) - command = "tar %s -x -f %s -C %s" % (tar_flag, path, self.tmp_dir) + command = "tar %s -x --exclude=*/dev/null -f %s -C %s" % (tar_flag, path, self.tmp_dir) logging.info("Extracting files in '%s'", self.tmp_dir) subproc.call(command, timeout=self.timeout)
Fix failure when unprivileged user execution of archive containing /dev/null (#<I>) See discussion in <URL>
py
diff --git a/salt/states/libcloud_dns.py b/salt/states/libcloud_dns.py index <HASH>..<HASH> 100644 --- a/salt/states/libcloud_dns.py +++ b/salt/states/libcloud_dns.py @@ -48,10 +48,6 @@ Example: from __future__ import absolute_import # Import salt libs -import salt.modules.libcloud_dns as libcloud_dns_module -import salt.utils - -# Import salt libs import salt.utils import logging log = logging.getLogger(__name__)
Remove unused/duplicate imports leftover from merge-conflict resolution
py
diff --git a/tests/nosetests/test_misaligned/test_misaligned_area_volume.py b/tests/nosetests/test_misaligned/test_misaligned_area_volume.py index <HASH>..<HASH> 100644 --- a/tests/nosetests/test_misaligned/test_misaligned_area_volume.py +++ b/tests/nosetests/test_misaligned/test_misaligned_area_volume.py @@ -14,11 +14,11 @@ def test_area_volume(): d = 1 Omega0 = 10 theta = 0.9 - area0 = 1.5542549932711638e-01 + area0 = 0.1554255703973858 volume0 = 5.7617852701434746e-03 av = ph.roche_misaligned_area_volume(q, F, d, theta, Omega0, choice, larea=True, lvolume=True) - + assert(m.fabs(av["larea"] - area0) < 1e-8*area0) assert(m.fabs(av["lvolume"] - volume0) < 1e-8*volume0)
Updating the misaligned area test.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ from distutils.core import setup from setuptools import find_packages -setup(name='btcpy', +setup(name='chainside-btcpy', version='0.1', packages=find_packages(), install_requires=['ecdsa', 'base58', 'python-bitcoinlib==0.7.0'], @@ -22,4 +22,4 @@ setup(name='btcpy', author_email='[email protected]', url='https://github.com/chainside/btcpy', download_url='https://github.com/chainside/btcpy/archive/0.1.tar.gz', - keywords=['bitcoin', 'blockchain', 'bitcoind']) + keywords=['bitcoin', 'blockchain', 'bitcoind', 'chainside'])
change package name for pypi
py
diff --git a/src/pyrocore/error.py b/src/pyrocore/error.py index <HASH>..<HASH> 100644 --- a/src/pyrocore/error.py +++ b/src/pyrocore/error.py @@ -48,6 +48,11 @@ class EngineError(LoggableError): """ +class NetworkError(LoggableError): + """ External connection errors. + """ + + class UserError(LoggableError): """ Yes, it was your fault! """
added error.NetworkError
py
diff --git a/retrying.py b/retrying.py index <HASH>..<HASH> 100644 --- a/retrying.py +++ b/retrying.py @@ -12,29 +12,6 @@ ## See the License for the specific language governing permissions and ## limitations under the License. -## --- The following is for portions of the "six" module ---------------------- -## -## Copyright (c) 2010-2014 Benjamin Peterson -## -## Permission is hereby granted, free of charge, to any person obtaining a copy -## of this software and associated documentation files (the "Software"), to deal -## in the Software without restriction, including without limitation the rights -## to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -## copies of the Software, and to permit persons to whom the Software is -## furnished to do so, subject to the following conditions: -## -## The above copyright notice and this permission notice shall be included in all -## copies or substantial portions of the Software. -## -## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -## SOFTWARE. -## ---------------------------------------------------------------------------- - import random import six import sys
remove six license boilerplate since we're not embedding it anymore
py
diff --git a/source/test/common/test_run_to_selected_state.py b/source/test/common/test_run_to_selected_state.py index <HASH>..<HASH> 100644 --- a/source/test/common/test_run_to_selected_state.py +++ b/source/test/common/test_run_to_selected_state.py @@ -16,9 +16,9 @@ def test_run_to_selected_state(caplog): rafcon.statemachine.singleton.state_machine_manager.delete_all_state_machines() testing_utils.test_multithrading_lock.acquire() - sm = storage.load_state_machine_from_path("/home_local/stoertebeker/develop/rafcon_tests/run_selected_state") + sm = storage.load_state_machine_from_path(testing_utils.get_test_sm_path("unit_test_state_machines/" + "run_to_selected_state_test")) # select state machine for this purpose - rafcon.statemachine.singleton.state_machine_manager.add_state_machine(sm) rafcon.statemachine.singleton.state_machine_execution_engine.run_to_selected_state("VVBPOY/AOZXRY", sm.state_machine_id) # run the statemachine to the state before AOYXRY, this is an asychronous task
fixed path fixed the wrong path for the test state machine which is located in test_script/unit_test...
py
diff --git a/openquake/engine/logs.py b/openquake/engine/logs.py index <HASH>..<HASH> 100644 --- a/openquake/engine/logs.py +++ b/openquake/engine/logs.py @@ -181,12 +181,12 @@ def handle(job, log_level='info', log_file=None): try: yield finally: + # sanity check to make sure that the logging on file is working + if log_file and os.path.getsize(log_file) == 0: + logging.root.warn('The log file %s is empty!?' % log_file) for handler in handlers: logging.root.removeHandler(handler) - # sanity check to make sure that the logging on file is working - if log_file: - assert os.path.getsize(log_file) > 0 - + class tracing(object): """
Declassed an assert to a warning
py
diff --git a/gnupg.py b/gnupg.py index <HASH>..<HASH> 100644 --- a/gnupg.py +++ b/gnupg.py @@ -1215,7 +1215,9 @@ class GPG(object): [cmd.append(opt) for opt in iter(_sanitise_list(self.options))] if args: [cmd.append(arg) for arg in iter(_sanitise_list(args))] - logger.debug("make_args(): Using command: %s" % cmd) + ## so that we don't print it twice, here and in _open_subprocess(): + if not self.verbose: + logger.debug("make_args(): Using command: %s" % cmd) return cmd def _open_subprocess(self, args=None, passphrase=False):
Remove duplicate log statement of the GnuPG command we're running.
py
diff --git a/tests/integration_test.py b/tests/integration_test.py index <HASH>..<HASH> 100644 --- a/tests/integration_test.py +++ b/tests/integration_test.py @@ -79,7 +79,7 @@ def setup_module(): c.inspect_image(BUSYBOX) except NotFound: os.write(2, "\npulling busybox\n".encode('utf-8')) - for data in c.pull('busybox', stream=True): + for data in c.pull(BUSYBOX, stream=True): data = json.loads(data.decode('utf-8')) os.write(2, ("%c[2K\r" % 27).encode('utf-8')) status = data.get("status")
Pull correct version of busybox in integration tests
py
diff --git a/pyang/statements.py b/pyang/statements.py index <HASH>..<HASH> 100644 --- a/pyang/statements.py +++ b/pyang/statements.py @@ -340,7 +340,7 @@ _deviate_delete_singleton_keywords = { _valid_deviations = { 'type':['leaf', 'leaf-list'], 'units':['leaf', 'leaf-list'], - 'default':['leaf', 'choice'], + 'default':['leaf', 'leaf-list', 'choice'], 'config':['leaf', 'choice', 'container', 'list', 'leaf-list'], 'mandatory':['leaf', 'choice'], 'min-elements':['leaf-list', 'list'],
Bugfix #<I>: deviate add should be able to add default stmt to leaf-list node
py
diff --git a/asv/commands/run.py b/asv/commands/run.py index <HASH>..<HASH> 100644 --- a/asv/commands/run.py +++ b/asv/commands/run.py @@ -26,7 +26,6 @@ class Run(object): "run", help="Run a benchmark suite", description="Run a benchmark suite.") - # TODO: Range of branches parser.add_argument( "--range", "-r", default="master^!", help="""Range of commits to benchmark. This is passed as @@ -40,8 +39,8 @@ class Run(object): reasonable number.""") parser.add_argument( "--bench", "-b", type=str, nargs="*", - help="""Regular expression for benchmark to run. When - none are provided, all benchmarks are run.""") + help="""Regular expression(s) for benchmark to run. When + not provided, all benchmarks are run.""") parser.set_defaults(func=cls.run_from_args) @@ -85,8 +84,8 @@ class Run(object): steps = len(commit_hashes) * len(benchmarks) * len(environments) console.message( - "Running {0} total benchmarks ({1} commits * {2} benchmarks * {3} environments)".format( - steps, len(commit_hashes), len(benchmarks), len(environments)), "green") + "Running {0} total benchmarks ({1} commits * {2} environments * {3} benchmarks)".format( + steps, len(commit_hashes), len(environments), len(benchmarks)), "green") console.set_nitems(steps) for env in environments:
Improve "asv run"'s output
py
diff --git a/test/unit/test_service_models.py b/test/unit/test_service_models.py index <HASH>..<HASH> 100644 --- a/test/unit/test_service_models.py +++ b/test/unit/test_service_models.py @@ -87,13 +87,6 @@ class HostListTestMixin(UrlTesterTestMixin): def _get_expected_items_for_urls(self, urls): return self._get_expected_items(get_hosts(urls)) - - def _test_function_does_not_handle_invalid_host_error(self, function, arg): - self._test_function_does_not_handle(InvalidHostError, - self.host_factory_mock, - function, - arg - ) def _get_result_for_invalid_host(self, function): unsupported_host = 'unsupported.com' @@ -407,7 +400,11 @@ class HostCollectionTest( def test_add_does_not_handle_value_error(self): function = self.tested_instance.add - self._test_function_does_not_handle_invalid_host_error(function, 'invalidhost.com') + self._test_function_does_not_handle(InvalidHostError, + self.host_factory_mock, + function, + 'invalidhost.com' + ) @parameterized.expand(HostListTestMixin.valid_host_input) def test_add_for_valid(self, _, value):
Remove an unnecessary function The function _test_function_does_not_handle_invalid_host_error ended up being called in just one place. Now it is being removed and its body is being moved to the function that uses it.
py
diff --git a/holoviews/element/raster.py b/holoviews/element/raster.py index <HASH>..<HASH> 100644 --- a/holoviews/element/raster.py +++ b/holoviews/element/raster.py @@ -481,6 +481,25 @@ class Image(Dataset, Element2D, SheetCoordinateSystem): ydensity=self.ydensity, **kwargs) + def sample(self, samples=[], **kwargs): + """ + Allows sampling of Dataset as an iterator of coordinates + matching the key dimensions, returning a new object containing + just the selected samples. Alternatively may supply kwargs + to sample a co-ordinate on an object. + """ + if kwargs and samples: + raise Exception('Supply explicit list of samples or kwargs, not both.') + if len(kwargs) == 1 and self.ndims == 2 and self.interface.gridded: + dim, val = list(kwargs.items())[0] + kdims = [d for d in self.kdims if d != dim] + return Curve(self.select(**kwargs).columns(), + kdims=kdims, vdims=self.vdims) + elif kwargs: + return self.clone(self.select(**kwargs).columns(), new_type=Dataset) + return self.clone(self.interface.sample(self, samples), new_type=Dataset) + + def _coord2matrix(self, coord): return self.sheet2matrixidx(*coord)
Reimplemented Image sample using interface methods
py
diff --git a/raiden/utils/profiling/greenlets.py b/raiden/utils/profiling/greenlets.py index <HASH>..<HASH> 100644 --- a/raiden/utils/profiling/greenlets.py +++ b/raiden/utils/profiling/greenlets.py @@ -1,6 +1,6 @@ import json import sys -import time +from datetime import datetime from typing import Any import greenlet @@ -38,7 +38,7 @@ def install_switch_log(): "origin": str(origin), "target": str(target), "target_callstack": callstack, - "time": time.time(), + "time": datetime.utcnow().isoformat(), } ) )
Use datetime in switch tracing This makes it easier to find the corresponding entries in other logs.
py
diff --git a/pythonforandroid/build.py b/pythonforandroid/build.py index <HASH>..<HASH> 100644 --- a/pythonforandroid/build.py +++ b/pythonforandroid/build.py @@ -381,7 +381,7 @@ class Context(object): self.ndk_platform = join( self.ndk_dir, 'platforms', - 'android-{}'.format(self.android_api), + 'android-{}'.format(self.ndk_api), platform_dir) if not exists(self.ndk_platform): warning('ndk_platform doesn\'t exist: {}'.format(
Corrected NDK platform finding to use ndk_api instead of android_api
py
diff --git a/Lib/fontbakery/codetesting.py b/Lib/fontbakery/codetesting.py index <HASH>..<HASH> 100644 --- a/Lib/fontbakery/codetesting.py +++ b/Lib/fontbakery/codetesting.py @@ -219,7 +219,7 @@ def assert_results_contain(check_results, f"(Bare string: {msg!r})") if status == expected_status and ( - expected_msgcode is None + (isinstance(msg, str) and msg == expected_msgcode) or (isinstance(msg, Message) and msg.code == expected_msgcode) ): if isinstance(msg, Message):
Fix condition 1. `expected_msgcode` can never be None because of the exception upstream: "Test must provide the expected message code" 2. Adds support for PASS and DEBUG statuses, which are allowed to use bare strings for their messages
py
diff --git a/spyder/widgets/calltip.py b/spyder/widgets/calltip.py index <HASH>..<HASH> 100644 --- a/spyder/widgets/calltip.py +++ b/spyder/widgets/calltip.py @@ -157,6 +157,10 @@ class CallTipWidget(QLabel): def show_tip(self, point, tip, wrapped_tiplines): """ Attempts to show the specified tip at the current cursor location. """ + # Don't attempt to show it if it's already visible + if self.isVisible(): + return True + # Attempt to find the cursor position at which to show the call tip. text_edit = self._text_edit cursor = text_edit.textCursor()
Calltip: Don't show it if it's already visible
py
diff --git a/dosagelib/plugins/s.py b/dosagelib/plugins/s.py index <HASH>..<HASH> 100644 --- a/dosagelib/plugins/s.py +++ b/dosagelib/plugins/s.py @@ -109,6 +109,14 @@ class SinFest(_BasicScraper): help = 'Index format: n (unpadded)' +class SkinDeep(_BasicScraper): + url = 'http://www.skindeepcomic.com/' + stripUrl = url + 'archive/%s/' + imageSearch = compile(r'<span class="webcomic-object[^>]*><img src="([^"]*)"') + prevSearch = compile(tagre("a", "href", r'([^"]+)', after="previous-webcomic-link")) + help = 'Index format: custom' + + class SlightlyDamned(_BasicScraper): url = 'http://www.sdamned.com/' stripUrl = url + '%s/'
Add SkinDeep. Filenames for this are all over the place :(
py
diff --git a/kdcount/sphere.py b/kdcount/sphere.py index <HASH>..<HASH> 100644 --- a/kdcount/sphere.py +++ b/kdcount/sphere.py @@ -101,7 +101,7 @@ def bootstrap(nside, rand, nbar, *data): r0 = numpy.concatenate((r0, r), axis=-1) else: heapq.heappush(heap, (a, j, r, d)) - heapq.heappush(heap, (a0, j, r0, d0)) + heapq.heappush(heap, (a0, j0, r0, d0)) for i in range(len(heap)): area, j, r, d = heapq.heappop(heap)
break a degeneracy by using correct j/j0.
py
diff --git a/ykman/cli/piv.py b/ykman/cli/piv.py index <HASH>..<HASH> 100644 --- a/ykman/cli/piv.py +++ b/ykman/cli/piv.py @@ -793,7 +793,11 @@ def _verify_pin(ctx, controller, pin, no_prompt=False): try: controller.verify(pin, touch_callback=prompt_for_touch) - except APDUError: + except WrongPin as e: + ctx.fail('PIN verification failed, {} tries left.'.format(e.tries_left)) + except AuthenticationBlocked as e: + ctx.fail('PIN is blocked.') + except Exception: ctx.fail('PIN verification failed.')
Catch WrongPin and AuthenticationBlocked in cli.piv._verify_pin
py
diff --git a/grimoire_elk/enriched/stackexchange.py b/grimoire_elk/enriched/stackexchange.py index <HASH>..<HASH> 100644 --- a/grimoire_elk/enriched/stackexchange.py +++ b/grimoire_elk/enriched/stackexchange.py @@ -234,7 +234,7 @@ class StackExchangeEnrich(Enrich): rich_answer = self.get_rich_item(answer, kind='answer', question_tags=item['data']['tags']) - if rich_answer['answer_tags']: + if 'answer_tags' in rich_answer: answers_tags.extend(rich_answer['answer_tags']) items_to_enrich.append(rich_answer)
[stackexchange] Handle missing `answer_tags` This code handles enriched items not having `answer_tags` field. Thus, it prevents the error KeyError: 'answer_tags'.
py
diff --git a/spatialist/vector.py b/spatialist/vector.py index <HASH>..<HASH> 100644 --- a/spatialist/vector.py +++ b/spatialist/vector.py @@ -578,11 +578,6 @@ class Vector(object): """ return self.layer.GetSpatialRef() - # todo Should return the wkt of the object, not of the projection - @property - def wkt(self): - return self.srs.ExportToWkt() - def write(self, outfile, format='ESRI Shapefile', overwrite=True): """ write the Vector object to a file
[Vector] removed property method wkt
py
diff --git a/lightcli.py b/lightcli.py index <HASH>..<HASH> 100644 --- a/lightcli.py +++ b/lightcli.py @@ -23,7 +23,7 @@ # lightcli # A lightweight terminal interaction library for Python. -__version__ = '0.2-alpha' +__version__ = '0.3-alpha'
Update version: 0.3-alpha
py
diff --git a/modelx/core/space.py b/modelx/core/space.py index <HASH>..<HASH> 100644 --- a/modelx/core/space.py +++ b/modelx/core/space.py @@ -612,7 +612,7 @@ class SpaceImpl(SpaceContainerImpl): self._spaces._repr = '<' + self.name + '.spaces>' self._builtin_names = LazyEvalDict( - data={ + data={'__builtins__': __builtins__, 'get_self': self.get_self_interface}) self._names = LazyEvalChainMap([self._builtin_names,
FIX: added __builtins__ in SpaceImpl._builtin_names
py
diff --git a/polyaxon_client/workers/queue_worker.py b/polyaxon_client/workers/queue_worker.py index <HASH>..<HASH> 100644 --- a/polyaxon_client/workers/queue_worker.py +++ b/polyaxon_client/workers/queue_worker.py @@ -12,7 +12,7 @@ from polyaxon_client.workers.base_worker import BaseWorker class QueueWorker(BaseWorker): - MIN_TIMEOUT = 0.1 + MIN_TIMEOUT = 1 TIMEOUT_ATTEMPTS = 5 QUEUE_SIZE = -1 # inf END_EVENT = object() @@ -55,10 +55,10 @@ class QueueWorker(BaseWorker): # Queue still has message, try another time size = self._queue.qsize() - print('Polyaxon %s is attempting to send %i pending messages' % - (self.NAME, size)) - print('Waiting up to {} seconds'.format(self._timeout)) if not settings.IN_CLUSTER: + print('Polyaxon %s is attempting to send %i pending messages' % + (self.NAME, size)) + print('Waiting up to {} seconds'.format(self._timeout)) if os.name == 'nt': print('Press Ctrl-Break to quit') else:
Update min timeout to allow transport to execute tasks in the queue
py
diff --git a/holoviews/core/util.py b/holoviews/core/util.py index <HASH>..<HASH> 100644 --- a/holoviews/core/util.py +++ b/holoviews/core/util.py @@ -649,7 +649,10 @@ def sort_topologically(graph): def is_cyclic(graph): - """Return True if the directed graph g has a cycle.""" + """ + Return True if the directed graph g has a cycle. The directed graph + should be represented as adictionary mapping of edges for each node. + """ path = set() def visit(vertex): @@ -664,9 +667,11 @@ def is_cyclic(graph): def one_to_one(graph, nodes): - """Return True if graph contains only one to one mappings. - Pass a graph as a dictionary mapping of edges for each node and - a list of all nodes.""" + """ + Return True if graph contains only one to one mappings. The + directed graph should be represented as a dictionary mapping of + edges for each node. Nodes should be passed a simple list. + """ edges = itertools.chain.from_iterable(graph.values()) return len(graph) == len(nodes) and len(set(edges)) == len(nodes)
Added docstrings for graph utility functions
py