Columns: diff (string, lengths 139–3.65k) · message (string, lengths 8–627) · diff_languages (string, 1 class)
diff --git a/trakt/interfaces/auth.py b/trakt/interfaces/auth.py
index <HASH>..<HASH> 100644
--- a/trakt/interfaces/auth.py
+++ b/trakt/interfaces/auth.py
@@ -4,13 +4,13 @@ from trakt.interfaces.base import Interface
 class AuthInterface(Interface):
     path = 'auth'
 
-    def login(self, login, password):
+    def login(self, login, password, **kwargs):
         response = self.http.post('login', data={
             'login': login,
             'password': password
         })
 
-        data = self.get_data(response)
+        data = self.get_data(response, **kwargs)
 
         if not data:
             return None
Added **kwargs to [/auth] login()
py
diff --git a/flask_user/forms.py b/flask_user/forms.py
index <HASH>..<HASH> 100644
--- a/flask_user/forms.py
+++ b/flask_user/forms.py
@@ -210,7 +210,7 @@ class LoginForm(Form):
         user, user_email = user_manager.find_user_by_email(self.email.data)
 
         # Handle successful authentication
-        if user and user_manager.verify_password(self.password.data, user):
+        if user and user.password and user_manager.verify_password(self.password.data, user):
             return True  # Successful authentication
 
         # Handle unsuccessful authentication
When integrating with other authentication mechanisms (e.g. SSO), some users may not have a populated password field. Check for user.password before attempting to verify_password.
py
diff --git a/safe_qgis/function_options_dialog.py b/safe_qgis/function_options_dialog.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/function_options_dialog.py
+++ b/safe_qgis/function_options_dialog.py
@@ -101,10 +101,6 @@ class FunctionOptionsDialog(QtGui.QDialog,
             QtGui.QFormLayout.FieldRole,
             myLineEdit)
 
-        #FIXME (MB) temporary fix through hiding for issue 365
-        if theParameterKey == 'postprocessors':
-            myLineEdit.hide()
-            myLabel.hide()
         self.formItemCounters += 1
 
     def setDialogInfo(self, theFunctionID):
reshowing postprocessors configuration. deals with #<I> and closes #<I>
py
diff --git a/gwpy/tests/test_timeseries.py b/gwpy/tests/test_timeseries.py
index <HASH>..<HASH> 100644
--- a/gwpy/tests/test_timeseries.py
+++ b/gwpy/tests/test_timeseries.py
@@ -144,6 +144,14 @@ class TimeSeriesTestMixin(object):
             pass
         else:
             nptest.assert_array_almost_equal(ts.value, comp.value)
+        # test observatory
+        ts2 = self.TEST_CLASS.find(FIND_CHANNEL, FIND_GPS, FIND_GPS+1,
+                                   frametype=FIND_FRAMETYPE,
+                                   observatory=FIND_CHANNEL[0])
+        self.assertArraysEqual(ts, ts2)
+        self.assertRaises(RuntimeError, self.TEST_CLASS.find, FIND_CHANNEL,
+                          FIND_GPS, FIND_GPS+1, frametype=FIND_FRAMETYPE,
+                          observatory='X')
 
     def test_find_best_frametype(self):
         try:
tests: added test of TimeSeries.find with observatory
py
diff --git a/editor.py b/editor.py
index <HASH>..<HASH> 100755
--- a/editor.py
+++ b/editor.py
@@ -73,7 +73,7 @@ def get_tty_filename():
     return '/dev/tty'
 
 
-def edit(filename=None, contents=None, use_tty=None, suffix=None):
+def edit(filename=None, contents=None, use_tty=None, suffix=''):
     editor = get_editor()
     args = [editor] + get_editor_args(os.path.basename(os.path.realpath(editor)))
 
Fix suffix default (should be '', not None)
py
diff --git a/c7n/resources/route53.py b/c7n/resources/route53.py
index <HASH>..<HASH> 100644
--- a/c7n/resources/route53.py
+++ b/c7n/resources/route53.py
@@ -59,15 +59,15 @@ def _describe_route53_tags(
                 k = k.split("/")[-1]
             resource_map[k] = r
 
-        results = retry(
-            client.list_tags_for_resources,
-            ResourceType=model.type,
-            ResourceIds=list(resource_map.keys()))
-
-        for resource_tag_set in results['ResourceTagSets']:
-            if ('ResourceId' in resource_tag_set and
-                    resource_tag_set['ResourceId'] in resource_map):
-                resource_map[resource_tag_set['ResourceId']]['Tags'] = resource_tag_set['Tags']
+        for resource_batch in chunks(list(resource_map.keys()), 10):
+            results = retry(
+                client.list_tags_for_resources,
+                ResourceType=model.type,
+                ResourceIds=resource_batch)
+            for resource_tag_set in results['ResourceTagSets']:
+                if ('ResourceId' in resource_tag_set and
+                        resource_tag_set['ResourceId'] in resource_map):
+                    resource_map[resource_tag_set['ResourceId']]['Tags'] = resource_tag_set['Tags']
 
     with executor_factory(max_workers=2) as w:
         return list(w.map(process_tags, chunks(resources, 20)))
aws.route<I> - tag augment workaround api limitation of <I> hosted zones at a time (#<I>)
py
diff --git a/performance/compile.py b/performance/compile.py
index <HASH>..<HASH> 100644
--- a/performance/compile.py
+++ b/performance/compile.py
@@ -427,9 +427,6 @@ class BenchmarkRevision(Application):
         self.safe_makedirs(self.conf.json_directory)
         self.safe_makedirs(self.conf.uploaded_json_dir)
 
-        # FIXME: remove this, only kept to check that the code doesn't rely on current working directory anymore!
-        os.chdir('/')
-
         self.compile_install()
         self.run_benchmark()
         if self.conf.upload:
@@ -439,8 +436,6 @@ class BenchmarkRevision(Application):
         dt = datetime.timedelta(seconds=dt)
         self.logger.error("Benchmark completed in %s" % dt)
 
-        return self.filename
-
 
 class Configuration:
     pass
@@ -451,7 +446,7 @@ def parse_config(filename, command):
     parse_compile_all = False
     if command == 'compile_all':
         parse_compile = True
-        parse_compile_all = False
+        parse_compile_all = True
     elif command == 'compile':
         parse_compile = True
     else:
@@ -592,7 +587,6 @@ class BenchmarkAll(Application):
 
     def main(self):
         self.safe_makedirs(self.conf.directory)
-        self.update_repository()
 
         try:
             for revision, branch in self.conf.revisions:
Fix compile_all

Also remove chdir('/'), now useless.
py
diff --git a/salt/modules/timezone.py b/salt/modules/timezone.py
index <HASH>..<HASH> 100644
--- a/salt/modules/timezone.py
+++ b/salt/modules/timezone.py
@@ -80,7 +80,7 @@ def set_zone(timezone):
     if not os.path.exists(zonepath):
         return 'Zone does not exist: {0}'.format(zonepath)
 
-    if os.path.exists('/etc/locatime'):
+    if os.path.exists('/etc/localtime'):
         os.unlink('/etc/localtime')
 
     os.symlink(zonepath, '/etc/localtime')
fixed typo s/locatime/localtime/
py
diff --git a/test/functional/test_warehouse.py b/test/functional/test_warehouse.py
index <HASH>..<HASH> 100644
--- a/test/functional/test_warehouse.py
+++ b/test/functional/test_warehouse.py
@@ -371,7 +371,12 @@ class BundleWarehouse(TestBase):
 
     def test_bundle_warehouse_query(self):
         l = self.library()
-        b = l.bundle('build.example.com-casters')
+        b = self.import_single_bundle('build.example.com/casters')
+        b.ingest()
+        b.source_schema()
+        b.schema()
+        b.build()
+
         wh = b.warehouse('test')
         wh.clean()
@@ -379,7 +384,6 @@ class BundleWarehouse(TestBase):
         self.assertEqual(20, sum(1 for row in wh.query('SELECT * FROM p00casters004003;')))
         self.assertEqual(6000, sum(1 for row in wh.query('SELECT * FROM p00casters006003;')))
-        self.assertEqual(4000, sum(1 for row in wh.query('SELECT * FROM pERJQxWUVb005001;')))
 
         p = l.partition('p00casters004003')
Warehouse broken test fixed. #<I>.
py
diff --git a/sendgrid/transport/smtp.py b/sendgrid/transport/smtp.py
index <HASH>..<HASH> 100644
--- a/sendgrid/transport/smtp.py
+++ b/sendgrid/transport/smtp.py
@@ -131,12 +131,12 @@ class Smtp(object):
         """
         Create a text based MIME part from the given text
         """
-        return MIMEText(payload, subtype, 'utf-8')
+        return MIMEText(payload.encode('utf-8'), subtype, 'utf-8')
 
     def _encodeEmail(self, name, e):
-        if name and not self._isAscii(name):
-            return utils.formataddr((base64mime.header_encode(name, 'utf-8'), e))
-        return utils.formataddr((name, e))
+        encoded_name = str(Header(unicode(name), 'ISO-8859-1'))
+        encoded_e = e.encode('ascii')
+        return utils.formataddr((encoded_name, encoded_e))
 
     def _encodeHeader(self, header):
         """
Fixed UTF-8 encoding crash

Fixed UTF-8 encoding support in email body, email address and names of users
py
diff --git a/tests/commands/test_commit_command.py b/tests/commands/test_commit_command.py
index <HASH>..<HASH> 100644
--- a/tests/commands/test_commit_command.py
+++ b/tests/commands/test_commit_command.py
@@ -129,3 +129,14 @@ def test_commit_when_customized_expected_raised(config, mocker, capsys):
 
     # Assert only the content in the formatted text
     assert "This is the root custom err" in str(excinfo.value)
+
+
[email protected]("staging_is_clean")
+def test_commit_when_non_customized_expected_raised(config, mocker, capsys):
+    _err = ValueError()
+    prompt_mock = mocker.patch("questionary.prompt")
+    prompt_mock.side_effect = _err
+
+    with pytest.raises(ValueError):
+        commit_cmd = commands.Commit(config, {})
+        commit_cmd()
test(commands/commit): add test case for raising non customized exception
py
diff --git a/tests/test_hmm_likelihood.py b/tests/test_hmm_likelihood.py
index <HASH>..<HASH> 100644
--- a/tests/test_hmm_likelihood.py
+++ b/tests/test_hmm_likelihood.py
@@ -96,7 +96,7 @@ def like_hand_test_2():
             trans_matrix=np.eye(2),
             init_distn=np.array([0.,1.]),
             data=np.zeros(10,dtype=int),
-            target_val=np.log(0.))
+            target_val=-np.inf)
 
 @attr('hmm','likelihood','messages','basic')
 def like_hand_test_3():
replace an np.log(0) with a -np.inf
py
diff --git a/temperusb/cli.py b/temperusb/cli.py
index <HASH>..<HASH> 100644
--- a/temperusb/cli.py
+++ b/temperusb/cli.py
@@ -76,3 +76,6 @@ def main():
             portinfo,
             reading['temperature_c'],
             reading['temperature_f']))
+
+if __name__ == '__main__':
+    main()
Convenience execution of main() when cli.py is started from the command line
py
diff --git a/apython/stream.py b/apython/stream.py
index <HASH>..<HASH> 100644
--- a/apython/stream.py
+++ b/apython/stream.py
@@ -40,12 +40,17 @@ class NonFileStreamReader:
         loop = asyncio.get_event_loop()
         self.loop = loop
         self.stream = stream
+        self.eof = False
+
+    def at_eof(self):
+        return self.eof
 
     @asyncio.coroutine
     def readline(self):
         data = yield from self.loop.run_in_executor(None, self.stream.readline)
         if isinstance(data, str):
             data = data.encode()
+        self.eof = not data
         return data
 
     @asyncio.coroutine
@@ -53,6 +58,7 @@ class NonFileStreamReader:
         data = yield from self.loop.run_in_executor(None, self.stream.read, n)
         if isinstance(data, str):
             data = data.encode()
+        self.eof = not data
         return data
Add at_eof() for NonFileStreamReader
py
diff --git a/ceph_deploy/install.py b/ceph_deploy/install.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/install.py
+++ b/ceph_deploy/install.py
@@ -54,6 +54,7 @@ def detect_components(args, distro):
         'install_rgw': 'ceph-radosgw',
         'install_mds': 'ceph-mds',
         'install_mon': 'ceph-mon',
+        'install_common': 'ceph-common',
     }
 
     if distro.is_rpm:
@@ -506,6 +507,13 @@ def make(parser):
     )
 
     version.add_argument(
+        '--cli', '--common',
+        dest='install_common',
+        action='store_true',
+        help='install the common component only',
+    )
+
+    version.add_argument(
         '--all',
         dest='install_all',
         action='store_true',
RM-<I>: Add install of ceph-common only

Add options --cli and --common, both of which allow a user to install only the ceph-common package on a node.
py
diff --git a/PyFunceble.py b/PyFunceble.py
index <HASH>..<HASH> 100755
--- a/PyFunceble.py
+++ b/PyFunceble.py
@@ -1802,6 +1802,7 @@ class Referer(object):
             'ad',
             'al',
             'ao',
+            'arpa',
             'az',
             'ba',
             'bb',
@@ -2661,7 +2662,7 @@ if __name__ == '__main__':
         '-v',
         '--version',
         action='version',
-        version='%(prog)s 0.22.0-beta'
+        version='%(prog)s 0.22.1-beta'
     )
 
     ARGS = PARSER.parse_args()
Introduction of `arpa` into the list of ignored extensions

cf: No whois server.
py
diff --git a/aiomysql/utils.py b/aiomysql/utils.py
index <HASH>..<HASH> 100644
--- a/aiomysql/utils.py
+++ b/aiomysql/utils.py
@@ -71,6 +71,7 @@ class _ContextManager(base):
     @asyncio.coroutine
     def __aexit__(self, exc_type, exc, tb):
         yield from self._obj.close()
+        self._obj = None
 
 
 class _ConnectionContextManager(_ContextManager):
@@ -82,6 +83,7 @@ class _ConnectionContextManager(_ContextManager):
             self._obj.close()
         else:
             yield from self._obj.ensure_closed()
+        self._obj = None
 
 
 class _PoolContextManager(_ContextManager):
@@ -91,6 +93,7 @@ class _PoolContextManager(_ContextManager):
     def __aexit__(self, exc_type, exc, tb):
         self._obj.close()
         yield from self._obj.wait_closed()
+        self._obj = None
 
 
 class _PoolConnectionContextManager:
drop references to objects (connection, cursor) in __aexit__
py
diff --git a/conferences/rules.py b/conferences/rules.py
index <HASH>..<HASH> 100644
--- a/conferences/rules.py
+++ b/conferences/rules.py
@@ -90,20 +90,6 @@ def short_description(self, key, value):
     }
 
 
[email protected]('keywords', '^6531.')
-def keywords(self, key, value):
-    def get_value(value):
-        return {
-            'value': value.get('a'),
-            'source': value.get('9')
-        }
-    value = force_list(value)
-    keywords = self.get('keywords', [])
-    for val in value:
-        keywords.append(get_value(val))
-    return keywords
-
-
 @conferences.over('series', '^411..')
 def series(self, key, value):
     def _get_name(value):
dojson: remove keywords from conferences

* Strictly speaking this needs to happen when we bump the schemas to version ~<I>, but we might as well do it now to fix a few errors on Sentry.
py
diff --git a/salt/grains/core.py b/salt/grains/core.py
index <HASH>..<HASH> 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -932,6 +932,7 @@ _OS_NAME_MAP = {
     'scientific': 'ScientificLinux',
     'synology': 'Synology',
     'manjaro': 'Manjaro',
+    'sles': 'SUSE',
 }
 
 # Map the 'os' grain to the 'os_family' grain
Fix the os grain in sle<I>sp4 to be SUSE instead of SLES

Due to the existence of /etc/os-release on SLE<I>SP4, the os grain differed between SLE<I>SP3 and SLE<I>SP4 (SLES vs SUSE). Thus, it was falsely trying to use systemd on SLE<I>SP4 to start services.
py
diff --git a/Xlib/ext/randr.py b/Xlib/ext/randr.py
index <HASH>..<HASH> 100644
--- a/Xlib/ext/randr.py
+++ b/Xlib/ext/randr.py
@@ -444,12 +444,11 @@ class GetOutputInfo(rq.ReplyRequest):
         rq.Card8('subpixel_order'),
         rq.LengthOf('crtcs', 2),
         rq.LengthOf('modes', 2),
-        rq.LengthOf('preferred', 2),
+        rq.Card16('num_preferred'),
         rq.LengthOf('clones', 2),
         rq.LengthOf('name', 2),
         rq.List('crtcs', rq.Card32Obj),
         rq.List('modes', rq.Card32Obj),
-        rq.List('preferred', rq.Card32Obj),
         rq.List('clones', rq.Card32Obj),
         rq.String8('name'),
         )
randr: Fix GetOutputInfo reply definition by replacing invalid field "preferred" and the associated length field with a single field, "num_preferred".
py
diff --git a/dipper/graph/RDFGraph.py b/dipper/graph/RDFGraph.py
index <HASH>..<HASH> 100644
--- a/dipper/graph/RDFGraph.py
+++ b/dipper/graph/RDFGraph.py
@@ -42,7 +42,8 @@ class RDFGraph(ConjunctiveGraph, DipperGraph):
                     (self._getNode(subject_id), self._getNode(predicate_id),
                      Literal(obj)))
             else:
-                logger.warn("Null value passed as object")
+                logger.warn("None as object for subj: %s and pred: %s",
+                            subject_id, predicate_id)
         else:
             self.add(
                 (self._getNode(subject_id), self._getNode(predicate_id),
log which subject and predicate when an object is None
py
diff --git a/gwpy/plot/tests/test_axes.py b/gwpy/plot/tests/test_axes.py
index <HASH>..<HASH> 100644
--- a/gwpy/plot/tests/test_axes.py
+++ b/gwpy/plot/tests/test_axes.py
@@ -23,7 +23,7 @@ import pytest
 
 import numpy
 
-from matplotlib import rcParams
+from matplotlib import (rcParams, __version__ as mpl_version)
 from matplotlib.collections import PolyCollection
 from matplotlib.lines import Line2D
 
@@ -101,6 +101,8 @@ class TestAxes(AxesTestBase):
                                11, endpoint=True),
         )
 
+    @pytest.mark.xfail(mpl_version < '1.4.0',
+                       reason='bugs in matplotlib-1.4.0')
     def test_tile(self, ax):
         x = numpy.arange(10)
         y = numpy.arange(x.size)
gwpy.plot.tests: mark xfails for old matplotlib
py
diff --git a/salt/states/cmd.py b/salt/states/cmd.py
index <HASH>..<HASH> 100644
--- a/salt/states/cmd.py
+++ b/salt/states/cmd.py
@@ -325,6 +325,7 @@ def mod_run_check(cmd_kwargs, onlyif, unless, creates):
     # to quote problems
     cmd_kwargs = copy.deepcopy(cmd_kwargs)
     cmd_kwargs['use_vt'] = False
+    cmd_kwargs['bg'] = False
 
     if onlyif is not None:
         if isinstance(onlyif, string_types):
never run bg for onlyif or unless cmd states
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,6 @@ install_requires = [
     "parsedatetime",
     "cached-property",
     "click",
-    "enum34",  # backported versions from Python3
     "pathlib",
     "configparser",
     "zope.interface",
enum<I> is not needed anymore

It is part of the standard library and we are <I>+ only.
py
diff --git a/test/unit/Utilities/IOTest.py b/test/unit/Utilities/IOTest.py
index <HASH>..<HASH> 100644
--- a/test/unit/Utilities/IOTest.py
+++ b/test/unit/Utilities/IOTest.py
@@ -132,9 +132,9 @@ class IOTest:
 
     def test_load_imorph(self):
         path = os.path.join(FIXTURE_DIR, 'iMorph-Sandstone')
-        path += '/'
-        net = io.iMorph.load(node_file=path + 'throats_cellsThroatsGraph_Nodes.txt',
-                             graph_file=path + 'throats_cellsThroatsGraph.txt')
+        node_file = os.path.join(path, 'throats_cellsThroatsGraph_Nodes.txt')
+        graph_file = os.path.join(path, 'throats_cellsThroatsGraph.txt')
+        net = io.iMorph.load(node_file=node_file,graph_file=graph_file)
         assert net.Np == 1518
         assert net.Nt == 2424
         assert sp.shape(net['pore.coords']) == (1518, 3)
minor fix to path joining for sub files
py
diff --git a/waterboy/api/info.py b/waterboy/api/info.py
index <HASH>..<HASH> 100644
--- a/waterboy/api/info.py
+++ b/waterboy/api/info.py
@@ -199,7 +199,7 @@ class BatchInfo(abc.MutableMapping):
 
     @property
     def aggregate_batch_number(self):
-        return self.batch_number + self.epoch_info.batches_per_epoch * self.epoch_info.global_epoch_idx
+        return self.batch_number + self.epoch_info.batches_per_epoch * (self.epoch_info.global_epoch_idx - 1)
 
     @property
     def epoch_number(self):
Making aggregate batch number 0-based.
py
diff --git a/billy/bin/update.py b/billy/bin/update.py
index <HASH>..<HASH> 100755
--- a/billy/bin/update.py
+++ b/billy/bin/update.py
@@ -64,6 +64,9 @@ def _run_scraper(scraper_type, options, metadata):
         _clear_scraped_data(options.output_dir, 'events')
 
     scraper = _get_configured_scraper(scraper_type, options, metadata)
+    ua_email = os.environ.get('BILLY_UA_EMAIL')
+    if ua_email:
+        scraper.user_agent += ' ({})'.format(ua_email)
 
     if not scraper:
         return [{
             "type": scraper_type,
set BILLY_UA_EMAIL to include email address in user-agent, #<I>
py
diff --git a/raiden/tests/utils/blockchain.py b/raiden/tests/utils/blockchain.py
index <HASH>..<HASH> 100644
--- a/raiden/tests/utils/blockchain.py
+++ b/raiden/tests/utils/blockchain.py
@@ -10,7 +10,7 @@ import termios
 import time
 
 import gevent
-from eth_utils import denoms
+from eth_utils import denoms, to_checksum_address
 import structlog
 from requests import ConnectionError
 
@@ -206,16 +206,16 @@ def geth_wait_and_check(deploy_client, privatekeys, random_marker):
         raise ValueError('geth didnt start the jsonrpc interface')
 
     for key in sorted(set(privatekeys)):
-        address = address_encoder(privatekey_to_address(key))
+        address = to_checksum_address(privatekey_to_address(key))
 
         tries = 10
-        balance = '0x0'
-        while balance == '0x0' and tries > 0:
-            balance = deploy_client.rpccall_with_retry('eth_getBalance', address, 'latest')
+        balance = 0
+        while balance == 0 and tries > 0:
+            balance = deploy_client.web3.eth.getBalance(address, 'latest')
             gevent.sleep(1)
             tries -= 1
 
-        if balance == '0x0':
+        if balance == 0:
             raise ValueError('account is with a balance of 0')
Fix a call to eth_getBalance
py
diff --git a/test_autofit/mapper/promise/test_promise.py b/test_autofit/mapper/promise/test_promise.py
index <HASH>..<HASH> 100644
--- a/test_autofit/mapper/promise/test_promise.py
+++ b/test_autofit/mapper/promise/test_promise.py
@@ -31,6 +31,26 @@ def make_last_instance():
     return af.last.instance.one.redshift
 
 
+class TestHasAttr:
+    def test_model(self, phase):
+        model = phase.result.model
+        assert hasattr(model, "one")
+        assert not hasattr(model, "gone")
+
+        galaxy = model.one
+        assert hasattr(galaxy, "light")
+        assert not hasattr(galaxy, "nada")
+
+    def test_instance(self, phase):
+        model = phase.result.instance
+        assert hasattr(model, "one")
+        assert not hasattr(model, "gone")
+
+        galaxy = model.one
+        assert hasattr(galaxy, "light")
+        assert not hasattr(galaxy, "nada")
+
+
 class TestLastPromises:
     def test_indexed_hyper(self, collection):
         result = af.last[0].hyper_result.model.populate(collection)
pushed test illustrating that hasattr works as expected
py
diff --git a/flask_security/forms.py b/flask_security/forms.py
index <HASH>..<HASH> 100644
--- a/flask_security/forms.py
+++ b/flask_security/forms.py
@@ -92,7 +92,10 @@ class RegisterFormMixin():
     submit = SubmitField("Register")
 
     def to_dict(form):
-        fields = inspect.getmembers(form, lambda member: isinstance(member, Field))
+        def is_field_and_user_attr(member):
+            return isinstance(member, Field) and hasattr(_datastore.user_model, member.name)
+
+        fields = inspect.getmembers(form, is_field_and_user_attr)
         return dict((key, value.data) for key, value in fields)
Fix to RegisterForm.to_dict. Only add fields that are also attributes on the _datastore.user_model.
py
diff --git a/python/ray/rllib/es/es.py b/python/ray/rllib/es/es.py
index <HASH>..<HASH> 100644
--- a/python/ray/rllib/es/es.py
+++ b/python/ray/rllib/es/es.py
@@ -124,13 +124,13 @@ class Worker(object):
                     [np.sign(rewards_pos).sum(), np.sign(rewards_neg).sum()])
                 lengths.append([lengths_pos, lengths_neg])
 
-            return Result(
-                noise_indices=noise_indices,
-                noisy_returns=returns,
-                sign_noisy_returns=sign_returns,
-                noisy_lengths=lengths,
-                eval_returns=eval_returns,
-                eval_lengths=eval_lengths)
+        return Result(
+            noise_indices=noise_indices,
+            noisy_returns=returns,
+            sign_noisy_returns=sign_returns,
+            noisy_lengths=lengths,
+            eval_returns=eval_returns,
+            eval_lengths=eval_lengths)
 
 
 class ESAgent(Agent):
fix indentation for ES (#<I>)
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -94,7 +94,7 @@ setup(
     # https://packaging.python.org/en/latest/requirements.html
     install_requires=[
         'ruamel.yaml>=0.13.4,<0.14.0',
-        'sympy>=0.7.7',
+        'sympy>=1.1.1',
         'pycachesim>=0.1.5',
         'pylru',
         'numpy',
updated required sympy version to <I> to resolve index error
py
diff --git a/GPy/models/GPLVM.py b/GPy/models/GPLVM.py
index <HASH>..<HASH> 100644
--- a/GPy/models/GPLVM.py
+++ b/GPy/models/GPLVM.py
@@ -28,7 +28,7 @@ class GPLVM(GP):
         if X is None:
             X = self.initialise_latent(init, Q, Y)
         if kernel is None:
-            kernel = kern.rbf(Q) + kern.bias(Q)
+            kernel = kern.rbf(Q, ARD=Q>1) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
         likelihood = Gaussian(Y, normalize=normalize_Y)
         GP.__init__(self, X, likelihood, kernel, **kwargs)
modified: GPy/models/GPLVM.py

Using the following kernel by default:
kernel = kern.rbf(Q, ARD=Q>1) + kern.bias(Q, np.exp(-2)) + kern.white(Q, np.exp(-2))
py
diff --git a/pysat/_instrument.py b/pysat/_instrument.py
index <HASH>..<HASH> 100644
--- a/pysat/_instrument.py
+++ b/pysat/_instrument.py
@@ -3201,7 +3201,7 @@ class Instrument(object):
                 del kwargs['freq']
             else:
                 freq = 'D'
-            
+
             # Make sure directories are there, otherwise create them
             try:
                 os.makedirs(self.files.data_path)
MAINT: removed extra whitespace

Removed extra whitespace.
py
diff --git a/skyfield/units.py b/skyfield/units.py
index <HASH>..<HASH> 100644
--- a/skyfield/units.py
+++ b/skyfield/units.py
@@ -15,8 +15,6 @@ def _to_array(value):
     else:
         return value
 
-# Distance and velocity.
-
 class UnpackingError(Exception):
     """You cannot iterate directly over a Skyfield measurement object."""
 
@@ -24,8 +22,7 @@ class Distance(object):
     """A distance, stored internally as au and available in other units.
 
     You can initialize a ``Distance`` by providing a single float or a
-    float array as either an ``au=`` parameter or a ``km=`` parameter
-    when building a ``Distance`` object.
+    float array as either an ``au=`` parameter or a ``km=`` parameter.
 
     """
     _warned = False
@@ -78,8 +75,8 @@ class Distance(object):
 class Velocity(object):
     """A velocity, stored internally as au/day and available in other units.
 
-    You can initialize a ``Velocity`` by providing a single float or a
-    float array as either an ``au_per_d=`` parameter.
+    You can initialize a ``Velocity`` by providing a float or float
+    array to its ``au_per_d=`` parameter.
 
     """
     _warned = False
Tweak a few units docstrings
py
diff --git a/dock/core.py b/dock/core.py
index <HASH>..<HASH> 100644
--- a/dock/core.py
+++ b/dock/core.py
@@ -467,8 +467,11 @@ class DockerTasker(LastLogger):
         logger.info("does image exists?")
         logger.debug("image_id = '%s'", image_id)
         try:
-            response = self.d.inspect_image(image_id) is not None
-        except APIError:
+            response = self.d.inspect_image(image_id)
+        except APIError as ex:
+            logger.warning(repr(ex))
             response = False
+        else:
+            response = response is not None
         logger.debug("image exists: %s", response)
         return response
core, image exists: log possible errors
py
diff --git a/EventRegistry/QueryArticles.py b/EventRegistry/QueryArticles.py
index <HASH>..<HASH> 100644
--- a/EventRegistry/QueryArticles.py
+++ b/EventRegistry/QueryArticles.py
@@ -1,4 +1,6 @@
-from eventregistry.Base import *
+
+import six
+from eventregistry.Base import *
 from eventregistry.ReturnInfo import *
 
 
@@ -210,7 +212,7 @@ class QueryArticles(Query):
 
 
 
-class QueryArticlesIter(QueryArticles):
+class QueryArticlesIter(QueryArticles, six.Iterator):
     """
     class that simplifies and combines functionality from QueryArticles and RequestArticlesInfo.
     It provides an iterator over the list of articles that match the specified conditions
@@ -295,7 +297,7 @@ class QueryArticlesIter(QueryArticles):
         return self
 
 
-    def next(self):
+    def __next__(self):
         """iterate over the available articles"""
         if len(self._articleList) == 0:
             self._getNextArticleBatch()
Add Python 3 support for QueryArticlesIter
py
diff --git a/moto/sns/models.py b/moto/sns/models.py
index <HASH>..<HASH> 100644
--- a/moto/sns/models.py
+++ b/moto/sns/models.py
@@ -580,7 +580,12 @@ class SNSBackend(BaseBackend):
         return subscription.attributes
 
     def set_subscription_attributes(self, arn, name, value):
-        if name not in ["RawMessageDelivery", "DeliveryPolicy", "FilterPolicy"]:
+        if name not in [
+            "RawMessageDelivery",
+            "DeliveryPolicy",
+            "FilterPolicy",
+            "RedrivePolicy",
+        ]:
             raise SNSInvalidParameter("AttributeName")
 
         # TODO: should do validation
Bugfix: RedrivePolicy Issue SNS (#<I>)

* Bugfix: S3 time precision issue fixed
* Bugfix: S3 time precision issue fixed
* s3 timeformat fix
* Quickfix S3 timefix
* Bugfix: Redrive Policy Allow
* Linting Fixed
py
diff --git a/optlang/interface.py b/optlang/interface.py
index <HASH>..<HASH> 100644
--- a/optlang/interface.py
+++ b/optlang/interface.py
@@ -203,7 +203,7 @@ class Variable(sympy.Symbol):
         else:
             if (primal <= self.lb) and ((self.lb - primal) <= tolerance):
                 return self.lb
-            elif (primal >= self.ub) and ((self.ub - primal) >= tolerance):
+            elif (primal >= self.ub) and ((self.ub - primal) >= -tolerance):
                 return self.ub
             else:
                 raise AssertionError('The primal value %s returned by the solver is out of bounds for variable %s (lb=%s, ub=%s)' % (primal, self.name, self.lb, self.ub))
Fixed an issue with primal rounding ... again.
py
diff --git a/pygmsh/common/geometry.py b/pygmsh/common/geometry.py
index <HASH>..<HASH> 100644
--- a/pygmsh/common/geometry.py
+++ b/pygmsh/common/geometry.py
@@ -41,12 +41,7 @@ class CommonGeometry:
 
     def __enter__(self):
-        if self.init_argv is None:
-            init_argv = []
-        else:
-            init_argv = self.init_argv
-
-        gmsh.initialize(init_argv)
+        gmsh.initialize([] if self.init_argv is None else self.init_argv)
         gmsh.model.add("pygmsh model")
         return self
onelined argument passing to gmsh.init
py
diff --git a/few/few.py b/few/few.py
index <HASH>..<HASH> 100644
--- a/few/few.py
+++ b/few/few.py
@@ -260,7 +260,8 @@ class FEW(BaseEstimator):
             # print("survivors:",stacks_2_eqns(survivors))
             pop.individuals[:] = survivors
             pop.X = np.vstack((pop.X, X_offspring))[survivor_index,:]
-            assert pop.X.shape[0] == self.population_size
+            if pop.X.shape[0] != self.population_size:
+                pdb.set_trace()
             # print("new pop.X:",pop.X[:,:4])
             # pdb.set_trace()
             # pop.X = pop.X[survivor_index,:]
debugging occasional error about size of X
py
diff --git a/cassandra/cluster.py b/cassandra/cluster.py
index <HASH>..<HASH> 100644
--- a/cassandra/cluster.py
+++ b/cassandra/cluster.py
@@ -1476,6 +1476,8 @@ class ControlConnection(object):
         try:
             if self._connection:
                 self._refresh_schema(self._connection, keyspace, table)
+        except ReferenceError:
+            pass  # our weak reference to the Cluster is no good
         except Exception:
             log.debug("[control connection] Error refreshing schema", exc_info=True)
             self._signal_error()
@@ -1522,6 +1524,8 @@ class ControlConnection(object):
         try:
             if self._connection:
                 self._refresh_node_list_and_token_map(self._connection)
+        except ReferenceError:
+            pass  # our weak reference to the Cluster is no good
         except Exception:
             log.debug("[control connection] Error refreshing node list and token map", exc_info=True)
             self._signal_error()
Ignore reference errors in the control conn

These frequently happen when the cluster is being shut down and the control connection is refreshing the schema or ring topology
py
diff --git a/holoviews/element/tabular.py b/holoviews/element/tabular.py
index <HASH>..<HASH> 100644
--- a/holoviews/element/tabular.py
+++ b/holoviews/element/tabular.py
@@ -132,7 +132,7 @@ class ItemTable(Element):
                           else k): [v] for k, v in self.data.items()})
 
 
-    def table(self):
+    def table(self, datatype=None):
         return Table(OrderedDict([((), self.values())]), kdims=[],
                      vdims=self.vdims)
Fixed incorrect signature of table method on ItemTable
py
diff --git a/troposphere/emr.py b/troposphere/emr.py
index <HASH>..<HASH> 100644
--- a/troposphere/emr.py
+++ b/troposphere/emr.py
@@ -475,7 +475,7 @@ class Studio(AWSObject):
         "ServiceRole": (str, True),
         "SubnetIds": ([str], True),
         "Tags": (Tags, False),
-        "UserRole": (str, True),
+        "UserRole": (str, False),
         "VpcId": (str, True),
         "WorkspaceSecurityGroupId": (str, True),
     }
making user role optional for emr studio
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -6,7 +6,11 @@ setup(
     version='1.0.1',
     author='Saul Shanabrook',
    author_email='[email protected]',
-    packages=['simpleimages', 'simpleimages.management.commands'],
+    packages=[
+        'simpleimages',
+        'simpleimages.management',
+        'simpleimages.management.commands',
+    ],
     url='https://www.github.com/saulshanabrook/django-simpleimages',
     license=open('LICENSE.txt').read(),
     description='Opinionated Django image transforms on models',
Added intermediate management command directory to setup.py ala <URL>
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@ setup(
     url="http://github.com/tobami/littlechef",
     download_url="http://github.com/tobami/littlechef/tags",
     keywords=["chef", "devops", "operations", "sysadmin"],
-    install_requires=['fabric>=1.5.4', 'argparse', 'jinja2'],
+    install_requires=['fabric>=1.5.4', 'argparse', 'jinja2>=2.7.3'],
     packages=['littlechef'],
     package_data={
         'littlechef': ['solo.rb', 'environment.rb']
Add version constraint to jinja2.
py
diff --git a/stanza/utils/datasets/ner/prepare_ner_dataset.py b/stanza/utils/datasets/ner/prepare_ner_dataset.py
index <HASH>..<HASH> 100644
--- a/stanza/utils/datasets/ner/prepare_ner_dataset.py
+++ b/stanza/utils/datasets/ner/prepare_ner_dataset.py
@@ -18,13 +18,14 @@ IJCNLP 2008 produced a few Indian language NER datasets.
   download: http://ltrc.iiit.ac.in/ner-ssea-08/index.cgi?topic=5
   The models produced from these datasets have extremely low recall,
   unfortunately.
-  - prepare_ner_dataset.py hi-fire2013
+  - prepare_ner_dataset.py hi_ijc
 
 FIRE 2013 also produced NER datasets for Indian languages.
   http://au-kbc.org/nlp/NER-FIRE2013/index.html
   The datasets are password locked.
   For Stanford users, contact Chris Manning for license details.
   For external users, please contact the organizers for more information.
+  - prepare_ner_dataset.py hi-fire2013
 
 Ukranian NER is provided by lang-uk, available here:
   https://github.com/lang-uk/ner-uk
Fix command line for hindi datasets
py
diff --git a/monolithe/generators/vspk/cli.py b/monolithe/generators/vspk/cli.py
index <HASH>..<HASH> 100755
--- a/monolithe/generators/vspk/cli.py
+++ b/monolithe/generators/vspk/cli.py
@@ -53,8 +53,8 @@ def main(argv=sys.argv):
 
     args = parser.parse_args()
 
-    if not args.vsdurl and "VSD_API_URL" in os.environ: args.vsdurl = os.environ["VSD_API_URL"]
-    if not args.apiversions and "VSD_API_VERSION" in os.environ: args.apiversions = [os.environ["VSD_API_VERSION"]]
+    if not args.vsdurl and not args.swagger_paths and "VSD_API_URL" in os.environ: args.vsdurl = os.environ["VSD_API_URL"]
+    if not args.apiversions and not args.swagger_paths and "VSD_API_VERSION" in os.environ: args.apiversions = [os.environ["VSD_API_VERSION"]]
 
     from monolithe.generators import VSDKGenerator, VSPKGenerator, VSPKDocumentationGenerator
Fixed using environment variable when forcing swagger path usage
py
diff --git a/monitoring/noxfile.py b/monitoring/noxfile.py
index <HASH>..<HASH> 100644
--- a/monitoring/noxfile.py
+++ b/monitoring/noxfile.py
@@ -122,7 +122,8 @@ def system(session):
     session.install("-e", ".")
 
     # Additional setup for VPCSC system tests
-    if os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC") != "true":
+    in_vpc = os.environ.get("GOOGLE_CLOUD_TESTS_IN_VPCSC", "false")
+    if in_vpc.lower() != "true":
         # Unset PROJECT_ID, since VPCSC system tests expect this to be a project
         # within the VPCSC perimeter.
         env = {
fix(monitoring): make VPCSC env comparison case-insensitive (#<I>)
py
diff --git a/odl/set/domain.py b/odl/set/domain.py
index <HASH>..<HASH> 100644
--- a/odl/set/domain.py
+++ b/odl/set/domain.py
@@ -259,8 +259,12 @@ class IntervalProd(Set):
             maxs = np.fromiter((np.max(vec) for vec in vecs), dtype=float)
             return np.all(mins >= self.begin) and np.all(maxs <= self.end)
         elif is_valid_input_array(other, self.ndim):
-            mins = np.min(other, axis=1)
-            maxs = np.max(other, axis=1)
+            if self.ndim == 1:
+                mins = np.min(other)
+                maxs = np.max(other)
+            else:
+                mins = np.min(other, axis=1)
+                maxs = np.max(other, axis=1)
             return np.all(mins >= self.begin) and np.all(maxs <= self.end)
         else:
             return False
ENH: make contains_all work for 1d arrays
py
diff --git a/cme/cmedb.py b/cme/cmedb.py
index <HASH>..<HASH> 100644
--- a/cme/cmedb.py
+++ b/cme/cmedb.py
@@ -157,10 +157,9 @@ class CMEDatabaseNavigator(cmd.Cmd):
                     password = cred[5]
                     cred_type = cred[6]
 
-                    if port == '445/tcp' and proto == '(smb)':
-                        if cred_type == 'Password':
-                            self.db.add_credential('plaintext', '', username, password)
-
+                    if proto == '(smb)' and cred_type == 'Password':
+                        self.db.add_credential('plaintext', '', username, password)
+
                 except IndexError:
                     continue
Fixed if statement in msf credential import code
py
diff --git a/pysat/utils/files.py b/pysat/utils/files.py
index <HASH>..<HASH> 100644
--- a/pysat/utils/files.py
+++ b/pysat/utils/files.py
@@ -430,11 +430,6 @@ def construct_searchstring_from_format(format_str, wildcard=False):
             else:
                 raise ValueError("Couldn't determine formatting width")
 
-    # Last block could potentially end upon a variable that needs to be parsed,
-    # rather than a string. Check for this condition.
-    if snip[1] is not None:
-        out_dict['string_blocks'].append('')
-
     return out_dict
ENH: Removed code not needed
py
diff --git a/soco.py b/soco.py
index <HASH>..<HASH> 100644
--- a/soco.py
+++ b/soco.py
@@ -146,7 +146,8 @@ class SoCo(object):
         """ Adds a given track to the queue.
 
         Returns:
-        True if the Sonos speaker successfully added the track
+        If the Sonos speaker successfully added the track, returns the queue
+        position of the track added.
 
         If an error occurs, we'll attempt to parse the error and return a UPnP
         error code. If that fails, the raw response sent back from the Sonos
@@ -161,7 +162,9 @@ class SoCo(object):
         if "errorCode" in response:
             return self.__parse_error(response)
         else:
-            return True
+            dom = XML.fromstring(response)
+            qnumber = dom.findtext('.//FirstTrackNumberEnqueued')
+            return int(qnumber)
 
     def pause(self):
         """ Pause the currently playing track.
@@ -193,7 +196,7 @@ class SoCo(object):
 
         If an error occurs, we'll attempt to parse the error and return a UPnP
         error code. If that fails, the raw response sent back from the Sonos
-        speaker will be returned.
+        epeaker will be returned.
 
         """
         action = '"urn:schemas-upnp-org:service:AVTransport:1#Stop"'
Had add_to_queue return the queue position of the newly added track
py
diff --git a/ceph_deploy/hosts/suse/install.py b/ceph_deploy/hosts/suse/install.py
index <HASH>..<HASH> 100644
--- a/ceph_deploy/hosts/suse/install.py
+++ b/ceph_deploy/hosts/suse/install.py
@@ -123,6 +123,7 @@ def repo_install(distro, repo_name, baseurl, gpgkey, **kw):
     enabled = kw.get('enabled', 1)
     gpgcheck = kw.get('gpgcheck', 1)
     install_ceph = kw.pop('install_ceph', False)
+    proxy = kw.get('proxy')
     _type = 'repo-md'
     baseurl = baseurl.strip('/')  # Remove trailing slashes
 
@@ -136,7 +137,7 @@ def repo_install(distro, repo_name, baseurl, gpgkey, **kw):
         ]
     )
 
-    repo_content = templates.custom_repo.format(
+    repo_content = templates.custom_repo(
         repo_name=repo_name,
         name = name,
         baseurl = baseurl,
@@ -144,6 +145,7 @@ def repo_install(distro, repo_name, baseurl, gpgkey, **kw):
         gpgcheck = gpgcheck,
         _type = _type,
         gpgkey = gpgkey,
+        proxy = proxy,
     )
 
     distro.conn.remote_module.write_yum_repo(
make suse use the new custom_repo callable
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -32,6 +32,6 @@ setup(
     long_description=long_description,
     name='pixelscan',
     packages=find_packages(exclude=['tests']),
-    url='https://github.com/dpmcmlxxvi/pixelscan',
+    url='https://github.com/dpmcmlxxvi/pixelscan/tarball/0.1.0',
     version='0.1.0',
 )
Add tarball url to setup
py
diff --git a/openquake/hazard/disagg/core.py b/openquake/hazard/disagg/core.py
index <HASH>..<HASH> 100644
--- a/openquake/hazard/disagg/core.py
+++ b/openquake/hazard/disagg/core.py
@@ -35,6 +35,7 @@ from openquake.job import config as job_cfg
 from openquake.output import hazard_disagg as hazard_output
 from openquake.utils import config
 
+from openquake.calculators.base import Calculator
 from openquake.hazard.disagg import subsets
 from openquake.hazard.general import (
     preload, generate_erf, generate_gmpe_map, set_gmpe_params,
@@ -159,7 +160,7 @@ def compute_disagg_matrix_task(job_id, site, realization, poe, result_dir):
     return compute_disagg_matrix(job_id, site, poe, result_dir)
 
 
-class DisaggMixin(Mixin):
+class DisaggMixin(Calculator):
     """The Python part of the Disaggregation calculator. This calculator
     computes disaggregation matrix results in the following manner:
DisaggMixin now uses Calculator for its base class.
py
diff --git a/safe/impact_functions/generic/classified_polygon_building/metadata_definitions.py b/safe/impact_functions/generic/classified_polygon_building/metadata_definitions.py
index <HASH>..<HASH> 100644
--- a/safe/impact_functions/generic/classified_polygon_building/metadata_definitions.py
+++ b/safe/impact_functions/generic/classified_polygon_building/metadata_definitions.py
@@ -92,7 +92,7 @@ class ClassifiedPolygonBuildingFunctionMetadata(ImpactFunctionMetadata):
             },
             'parameters': OrderedDict([
                 # The attribute of hazard zone in hazard layer
-                ('hazard zone attribute', 'zone')
+                ('hazard zone attribute', 'KRB')
             ])
         }
         return dict_meta
Change the default value of the hazard zone attribute to KRB for classified polygon IF on building.
py
diff --git a/modeltranslation/tests/__init__.py b/modeltranslation/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/modeltranslation/tests/__init__.py
+++ b/modeltranslation/tests/__init__.py
@@ -739,11 +739,16 @@ class ForeignKeyFieldsTest(ModeltranslationTestBase):
         self.assertEqual(inst.optional_en_id, test_inst2.pk)
         self.assertEqual(inst.optional_en.title, 'title2_en')
 
-        # Check filtering in direct way + lookup spanning
+        # Test caching
         inst.test_en = test_inst2
         inst.save()
-        manager = self.model.objects
+        trans_real.activate("de")
+        self.assertEqual(inst.test, test_inst1)
+        trans_real.activate("en")
+        self.assertEqual(inst.test, test_inst2)
+
+        # Check filtering in direct way + lookup spanning
+        manager = self.model.objects
         trans_real.activate("de")
         self.assertEqual(manager.filter(test=test_inst1).count(), 1)
         self.assertEqual(manager.filter(test_en=test_inst1).count(), 0)
Add caching test for relation fields.
py
diff --git a/openquake/hazardlib/source/point.py b/openquake/hazardlib/source/point.py
index <HASH>..<HASH> 100644
--- a/openquake/hazardlib/source/point.py
+++ b/openquake/hazardlib/source/point.py
@@ -403,9 +403,9 @@ class CollapsedPointSource(ParametricSeismicSource):
 
     def count_ruptures(self):
         """
-        :returns: the number of underlying point sources * 2
+        :returns: the number of underlying point ruptures * 2
         """
-        return len(self.pointsources) * 2
+        return len(self.get_annual_occurrence_rates()) * 2
 
     def get_bounding_box(self, maxdist):
         """
Fixed weight [skip CI]
py
diff --git a/shap/explainers/tree.py b/shap/explainers/tree.py
index <HASH>..<HASH> 100644
--- a/shap/explainers/tree.py
+++ b/shap/explainers/tree.py
@@ -494,14 +494,14 @@ class TreeExplainer(Explainer):
             The one reference Shapley value for all features.
         """
         assert have_cext, "C extension was not built during install!"
-        x_missing = np.zeros(x.shape, dtype=np.bool)
+        x_missing = np.isnan(x)
         feats = range(0, self.data.shape[1])
         phi_final = []
         for tree in self.trees:
             phi = []
             for j in range(self.data.shape[0]):
                 r = self.data[j,:]
-                r_missing = np.zeros(r.shape, dtype=np.bool)
+                r_missing = np.isnan(r)
                 out_contribs = np.zeros(x.shape)
                 _cext.tree_shap_indep(
                     tree.max_depth, tree.children_left, tree.children_right,
Modified independent tree shap to use np.nan to check for missing values
py
diff --git a/django_enumfield/tests/models.py b/django_enumfield/tests/models.py
index <HASH>..<HASH> 100644
--- a/django_enumfield/tests/models.py
+++ b/django_enumfield/tests/models.py
@@ -13,7 +13,7 @@ class LampState(Enum):
 
 
 class Lamp(models.Model):
-    state = EnumField(LampState)
+    state = EnumField(LampState, verbose_name="stately_state")
 
 
 class PersonStatus(Enum):
Set verbose_name in test model to check for errors
py
diff --git a/room.py b/room.py
index <HASH>..<HASH> 100644
--- a/room.py
+++ b/room.py
@@ -121,3 +121,10 @@ class Room(CampfireEntity):
         self._load()
 
         return result["success"]
+
+    def get_users(self):
+        self._load()
+        return self.users
+
+    def get_uploads(self):
+        return self._connection.get("room/%s/uploads" % self.id, key="uploads")
Implementing get_users and get_uploads for room
py
diff --git a/tests.py b/tests.py
index <HASH>..<HASH> 100644
--- a/tests.py
+++ b/tests.py
@@ -1485,9 +1485,9 @@ class PrefetchTestCase(ModelTestCase):
         for g in range(2):
             gc = Category.create(name='g%d' % g)
             for p in range(2):
-                pc = Category.create(name='g%d-p%d' % (g, p), parent=g)
+                pc = Category.create(name='g%d-p%d' % (g, p), parent=gc)
                 for c in range(2):
-                    Category.create(name='g%d-p%d-c%d' % (g, p, c), parent=p)
+                    Category.create(name='g%d-p%d-c%d' % (g, p, c), parent=pc)
 
         Children = Category.alias()
         Grandchildren = Category.alias()
Fixing small bug in the category alias prefetch stuff
py
diff --git a/pyxmpp/stanzapayload.py b/pyxmpp/stanzapayload.py
index <HASH>..<HASH> 100644
--- a/pyxmpp/stanzapayload.py
+++ b/pyxmpp/stanzapayload.py
@@ -32,8 +32,8 @@ class StanzaPayload:
     """Abstract base class for stanza payload objects."""
     __metaclass__ = ABCMeta
 
-    def __init__(self, data):
-        raise NotImplementedError
+    def __init__(self, element):
+        pass
 
     def as_xml(self):
         raise NotImplementedError
'Do nothing' constructor for StanzaPayload abc
py
diff --git a/safe_qgis/report/html_renderer.py b/safe_qgis/report/html_renderer.py
index <HASH>..<HASH> 100644
--- a/safe_qgis/report/html_renderer.py
+++ b/safe_qgis/report/html_renderer.py
@@ -245,8 +245,9 @@ class HtmlRenderer():
                 html += aggregation_table
             if attribution_table is not None:
                 html += attribution_table.to_html()
-            html += '<h2>%s</h2>' % self.tr('Detailed Table')
-            html += full_table
+            if full_table is not None:
+                html += '<h2>%s</h2>' % self.tr('Detailed Table')
+                html += full_table
         else:
             if aggregation_table is not None:
                 html = aggregation_table
Fix issue with printing maps for impacts without full_report
py
diff --git a/perceval/_version.py b/perceval/_version.py
index <HASH>..<HASH> 100644
--- a/perceval/_version.py
+++ b/perceval/_version.py
@@ -1,2 +1,2 @@
 # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440
-__version__ = "0.11.5"
+__version__ = "0.11.6"
Update version number to <I>
py
diff --git a/HARK/ConsumptionSaving/ConsIndShockModel.py b/HARK/ConsumptionSaving/ConsIndShockModel.py
index <HASH>..<HASH> 100644
--- a/HARK/ConsumptionSaving/ConsIndShockModel.py
+++ b/HARK/ConsumptionSaving/ConsIndShockModel.py
@@ -2958,7 +2958,6 @@ class KinkedRconsumerType(IndShockConsumerType):
     """
 
     time_inv_ = copy(IndShockConsumerType.time_inv_)
-    time_inv_.remove("Rfree")
     time_inv_ += ["Rboro", "Rsave"]
 
     def __init__(self, **kwds):
remove Rfree from KinkedRconsumerType
py
diff --git a/ec2/spark_ec2.py b/ec2/spark_ec2.py
index <HASH>..<HASH> 100755
--- a/ec2/spark_ec2.py
+++ b/ec2/spark_ec2.py
@@ -65,8 +65,7 @@ def parse_args():
            "slaves across multiple (an additional $0.01/Gb for bandwidth" +
            "between zones applies)")
   parser.add_option("-a", "--ami", default="latest",
-      help="Amazon Machine Image ID to use, 'vX.Y.Z' to use version " +
-           "X.Y.Z of Spark, or 'latest' to use latest AMI (default: latest)")
+      help="Amazon Machine Image ID to use (default: latest)")
   parser.add_option("-v", "--spark-version", default="latest",
       help="Version of Spark to use (X.Y.Z or 'latest' to use most recent)")
   parser.add_option("-D", metavar="[ADDRESS:]PORT", dest="proxy_port",
Removing now defunct ami documentation
py
diff --git a/hydra_base/lib/units.py b/hydra_base/lib/units.py
index <HASH>..<HASH> 100644
--- a/hydra_base/lib/units.py
+++ b/hydra_base/lib/units.py
@@ -292,6 +292,18 @@ def get_unit_dimension(measure_or_unit_abbreviation,**kwargs):
         dimension = db.DBSession.query(Dimension).filter(Dimension.id==units[0].dimension_id).one()
         return str(dimension.name)
 
+def get_dimension_by_unit_id(unit_id,**kwargs):
+    """
+        Return the physical dimension a given unit id refers to.
+    """
+
+    try:
+        dimension = db.DBSession.query(Dimension).join(Unit).filter(Unit.id==unit_id).filter().one()
+        return get_dimension(dimension.id)
+    except NoResultFound:
+        # The dimension does not exist
+        raise ResourceNotFoundError("Unit %s not found"%(unit_id))
+
 
 def get_unit_by_abbreviation(unit_abbreviation, **kwargs):
     """
@@ -372,7 +384,7 @@ def delete_dimension(dimension_id,**kwargs):
         db.DBSession.flush()
         return True
     except NoResultFound:
-        raise ResourceNotFoundError("Dimension (dimension_name=%s) does not exist"%(dimension_name))
+        raise ResourceNotFoundError("Dimension (dimension_id=%s) does not exist"%(dimension_id))
 
 
 """
Added a way to recover a dimension by the id of one of its units
py
diff --git a/tests/base.py b/tests/base.py
index <HASH>..<HASH> 100644
--- a/tests/base.py
+++ b/tests/base.py
@@ -44,8 +44,8 @@ class TestCaseBackendArchive(unittest.TestCase):
     def _test_fetch_from_archive(self, **kwargs):
         """Test whether the method fetch_from_archive works properly"""
 
-        items = [items for items in self.backend.fetch(**kwargs)]
-        items_archived = [item for item in self.backend.fetch_from_archive()]
+        items = [items for items in self.backend_write_archive.fetch(**kwargs)]
+        items_archived = [item for item in self.backend_write_archive.fetch_from_archive()]
 
         self.assertEqual(len(items), len(items_archived))
[tests] Modify tests when fetching from archive

This patch modifies the generic test used to check the fetch_from_archive method. Now two different backend objects are used, which ensures that backend and method params are initialized in the same way independently of which method is called (fetch or fetch_from_archive)
py
diff --git a/ipyxact/ipyxact.py b/ipyxact/ipyxact.py
index <HASH>..<HASH> 100644
--- a/ipyxact/ipyxact.py
+++ b/ipyxact/ipyxact.py
@@ -37,7 +37,7 @@ class IpxactInt(int):
         if not args:
             return int()
 
-        expr = args[0]
+        expr = args[0].strip()
         base = 10
 
         if len(expr) > 2 and expr[0:2] == '0x':
Strip leading and trailing spaces from ints
py
diff --git a/pyramid_orb/__init__.py b/pyramid_orb/__init__.py
index <HASH>..<HASH> 100644
--- a/pyramid_orb/__init__.py
+++ b/pyramid_orb/__init__.py
@@ -43,6 +43,12 @@ def includeme(config):
     # set the max limit when desired
     utils.DEFAULT_MAX_LIMIT = int(settings.pop('orb.settings.default_max_limit', utils.DEFAULT_MAX_LIMIT))
 
+    # create the orb global settings
+    for key, value in settings.items():
+        if key.startswith('orb.settings'):
+            sub_key = key.replace('orb.settings.', '')
+            setattr(orb.system.settings(), sub_key, value)
+
     # create the database conneciton
     db_type = settings.get('orb.db.type')
     if db_type:
@@ -57,16 +63,9 @@ def includeme(config):
         except StandardError:
             pass
         db.activate()
-        db.connect()
         config.registry.db = db
 
-    # create the orb global settings
-    for key, value in settings.items():
-        if key.startswith('orb.settings'):
-            sub_key = key.replace('orb.settings.', '')
-            setattr(orb.system.settings(), sub_key, value)
-
     # create the API factory
     api_root = settings.get('orb.api.root')
     if api_root:
* loading the settings before defining the database
py
diff --git a/code/lamost/li_giants/residuals.py b/code/lamost/li_giants/residuals.py
index <HASH>..<HASH> 100644
--- a/code/lamost/li_giants/residuals.py
+++ b/code/lamost/li_giants/residuals.py
@@ -8,6 +8,8 @@ import matplotlib.gridspec as gridspec
 from matplotlib.colors import LogNorm
 plt.rc('text', usetex=True)
 from matplotlib.ticker import MaxNLocator
+import sys
+sys.path.insert(0, '/home/annaho/TheCannon')
 from TheCannon import model
 from TheCannon import dataset
add TheCannon to sys path
py
diff --git a/pecan/commands/serve.py b/pecan/commands/serve.py
index <HASH>..<HASH> 100644
--- a/pecan/commands/serve.py
+++ b/pecan/commands/serve.py
@@ -1,7 +1,6 @@
 """
 PasteScript serve command for Pecan.
 """
-from paste import httpserver
 from paste.script.serve import ServeCommand as _ServeCommand
 
 from base import Command
@@ -49,9 +48,14 @@ class ServeCommand(_ServeCommand, Command):
         _ServeCommand.command(self)
 
     def loadserver(self, server_spec, name, relative_to, **kw):
-        return (lambda app: httpserver.serve(
-            app, app.config.server.host, app.config.server.port
-        ))
-
+        return (lambda app: WSGIRefServer(self.config.server.host, self.config.server.port, app))
+
     def loadapp(self, app_spec, name, relative_to, **kw):
-        return self.load_app()
+        return self.load_app(self.config)
+
+
+def WSGIRefServer(host, port, app, **options):
+    from wsgiref.simple_server import make_server
+    port = int(port)
+    srv = make_server(host, port, app, **options)
+    srv.serve_forever()
a very simple approach for serving http requests
py
diff --git a/tests/sos_tests.py b/tests/sos_tests.py
index <HASH>..<HASH> 100644
--- a/tests/sos_tests.py
+++ b/tests/sos_tests.py
@@ -420,7 +420,10 @@ class BaseSoSReportTest(BaseSoSTest):
         """
         if not self.manifest:
             self.error("No manifest found, cannot check for %s execution" % plugin)
-        assert plugin in self.manifest['components']['report']['plugins'].keys(), 'Plugin not recorded in manifest'
+        if isinstance(plugin, str):
+            plugin = [plugin]
+        for plug in plugin:
+            assert plug in self.manifest['components']['report']['plugins'].keys(), "Plugin '%s' not recorded in manifest" % plug
 
     def assertPluginNotIncluded(self, plugin):
         """Ensure that the specified plugin did NOT run for the sos execution
@@ -431,7 +434,10 @@ class BaseSoSReportTest(BaseSoSTest):
         """
         if not self.manifest:
             self.error("No manifest found, cannot check for %s execution" % plugin)
-        assert plugin not in self.manifest['components']['report']['plugins'].keys(), 'Plugin is recorded in manifest'
+        if isinstance(plugin, str):
+            plugin = [plugin]
+        for plug in plugin:
+            assert plug not in self.manifest['components']['report']['plugins'].keys(), "Plugin '%s' is recorded in manifest" % plug
 
     def assertOnlyPluginsIncluded(self, plugins):
         """Ensure that only the specified plugins are in the manifest
[tests] Allow lists for plugin enablement assertions

Updates `assertPlugin(Not)Included` to allow for lists as well as single strings.
py
diff --git a/test/test_command_line_interface.py b/test/test_command_line_interface.py
index <HASH>..<HASH> 100644
--- a/test/test_command_line_interface.py
+++ b/test/test_command_line_interface.py
@@ -73,6 +73,7 @@ class ListingCommands(unittest.TestCase):
                 " testuid2"]
         self.assertListEqual(text, expected)
 
+    @mock.patch.dict('os.environ', LANG='en_US.UTF-8')
     def test_simple_bdays_without_options(self):
         with mock_stdout() as stdout:
             khard.main(['birthdays'])
Fix test for international environment

One test did fail with a $LANG that is not en_US.UTF-8 so we mock that variable.

Fix: #<I>
py
diff --git a/tweepy/api.py b/tweepy/api.py
index <HASH>..<HASH> 100644
--- a/tweepy/api.py
+++ b/tweepy/api.py
@@ -3894,9 +3894,14 @@ class API:
         It is recommended applications request this endpoint when they are
         loaded, but no more than once a day.
 
-        :rtype: :class:`JSON` object
+        Returns
+        -------
+        :class:`dict`
+            JSON
 
-        :reference: https://developer.twitter.com/en/docs/twitter-api/v1/developer-utilities/configuration/api-reference/get-help-configuration
+        References
+        ----------
+        https://developer.twitter.com/en/docs/twitter-api/v1/developer-utilities/configuration/api-reference/get-help-configuration
         """
         return self.request('GET', 'help/configuration', **kwargs)
Update and improve documentation for API.configuration
py
diff --git a/astroid/tests/unittest_brain.py b/astroid/tests/unittest_brain.py
index <HASH>..<HASH> 100644
--- a/astroid/tests/unittest_brain.py
+++ b/astroid/tests/unittest_brain.py
@@ -1429,8 +1429,8 @@ class TestFunctoolsPartial:
         ''')
         for node in ast_nodes:
             inferred = next(node.infer())
-            assert isinstance(inferred, astroid.Instance)
-            assert inferred.qname() == 'functools.partial'
+            assert isinstance(inferred, (astroid.FunctionDef, astroid.Instance))
+            assert inferred.qname() in ('functools.partial', 'functools.partial.newfunc')
 
     def test_inferred_partial_function_calls(self):
         ast_nodes = astroid.extract_node('''
Support versions of functools.partial for older Python versions
py
diff --git a/indy_node/server/domain_req_handler.py b/indy_node/server/domain_req_handler.py
index <HASH>..<HASH> 100644
--- a/indy_node/server/domain_req_handler.py
+++ b/indy_node/server/domain_req_handler.py
@@ -272,7 +272,7 @@ class DomainReqHandler(PHandler):
         assert revoc_def_type
 
         tags = cred_def_id.split(":")
-        revoc_def = make_state_path_for_revoc_def(tags[0],cred_def_id,revoc_def_type,revoc_def_tag)
+        revoc_def = make_state_path_for_revoc_def(tags[0], cred_def_id, revoc_def_type, revoc_def_tag)
         revoc_def_id, _, _, _ = self.lookup(revoc_def, isCommitted=False)
 
         if revoc_def is None:
[INDY-<I>] flake8 fixes for domain req handler
py
diff --git a/pyghmi/ipmi/private/session.py b/pyghmi/ipmi/private/session.py
index <HASH>..<HASH> 100644
--- a/pyghmi/ipmi/private/session.py
+++ b/pyghmi/ipmi/private/session.py
@@ -1076,7 +1076,7 @@ class Session(object):
 
         This watches for any activity on IPMI handles and handles registered
         by register_handle_callback.  Callers are satisfied in the order that
-        packets return from nework, not in the order of calling.
+        packets return from network, not in the order of calling.
 
         :param timeout: Maximum time to wait for data to come across.  If
             unspecified, will autodetect based on earliest timeout
Update "nework" to "network" in comments Change-Id: I<I>c<I>b<I>b<I>d7deb0e<I>b<I>e<I>c<I>
py
diff --git a/django_auth_pubtkt/auth_pubtkt.py b/django_auth_pubtkt/auth_pubtkt.py
index <HASH>..<HASH> 100644
--- a/django_auth_pubtkt/auth_pubtkt.py
+++ b/django_auth_pubtkt/auth_pubtkt.py
@@ -16,7 +16,7 @@
 # limitations under the License.
 
 
-from M2Crypto import RSA, DSA
+from M2Crypto import RSA, DSA, RSA
 import hashlib, time, base64
 import urllib
 
@@ -27,7 +27,12 @@ class Authpubtkt(object):
 
     def __init__(self, filename=None, pub_key=None):
         if filename:
             self.filename = file
-            pub_key = DSA.load_pub_key(filename)
+            try:
+                pub_key = DSA.load_pub_key(filename)
+            except DSA.DSAError:
+                pass
+            if pub_key is None:
+                pub_key = RSA.load_pub_key(filename)
         if pub_key is None:
             raise ValueError("Please specify filename or public key")
Adding loading of RSA public keys
py
diff --git a/identify/identify.py b/identify/identify.py
index <HASH>..<HASH> 100644
--- a/identify/identify.py
+++ b/identify/identify.py
@@ -136,7 +136,7 @@ def parse_shebang(bytesio):
         return ()
     first_line = bytesio.readline()
     try:
-        first_line = first_line.decode('US-ASCII')
+        first_line = first_line.decode('UTF-8')
     except UnicodeDecodeError:
         return ()
Use UTF-8 to decode the shebang line
py
diff --git a/pyqode/core/api/code_edit.py b/pyqode/core/api/code_edit.py
index <HASH>..<HASH> 100644
--- a/pyqode/core/api/code_edit.py
+++ b/pyqode/core/api/code_edit.py
@@ -819,9 +819,14 @@ class CodeEdit(QtWidgets.QPlainTextEdit):
         super().cut()
 
     def copy(self):
+        pos = self.textCursor().position()
         if not self.textCursor().hasSelection():
             TextHelper(self).select_whole_line()
         super().copy()
+        TextHelper(self).clear_selection()
+        tc = self.textCursor()
+        tc.setPosition(pos)
+        self.setTextCursor(tc)
 
     def resizeEvent(self, e):
         """
Implement pyQode/pyQode#<I> for cut & copy; fixes pyQode/pyQode#<I>
py
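The diff makes copy-with-no-selection grab the whole line while leaving the caret where it was. A sketch of the same save/restore-cursor pattern, assuming PyQt5 (pyqode actually goes through its own Qt shim) and a desktop session for the clipboard to work:

import sys
from PyQt5.QtGui import QTextCursor
from PyQt5.QtWidgets import QApplication, QPlainTextEdit

class Editor(QPlainTextEdit):
    def copy(self):
        pos = self.textCursor().position()
        if not self.textCursor().hasSelection():
            cursor = self.textCursor()
            cursor.select(QTextCursor.LineUnderCursor)  # whole-line copy
            self.setTextCursor(cursor)
        super().copy()
        cursor = self.textCursor()
        cursor.clearSelection()   # drop the temporary selection
        cursor.setPosition(pos)   # put the caret back where it was
        self.setTextCursor(cursor)

app = QApplication(sys.argv)
editor = Editor()
editor.setPlainText('first line\nsecond line')
editor.copy()
print(QApplication.clipboard().text())  # first line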
diff --git a/tests/calculators/hazard/classical/core_test.py b/tests/calculators/hazard/classical/core_test.py index <HASH>..<HASH> 100644 --- a/tests/calculators/hazard/classical/core_test.py +++ b/tests/calculators/hazard/classical/core_test.py @@ -64,8 +64,14 @@ class ClassicalHazardCalculatorTestCase(unittest.TestCase): mocks = [p.start() for p in patches] + # we don't expect the site collection to be loaded yet: + self.assertIsNone(self.calc.hc._site_collection) + self.calc.pre_execute() + # make sure the site_collection is loaded: + self.assertIsNotNone(self.calc.hc._site_collection) + for i, m in enumerate(mocks): self.assertEqual(1, m.call_count) m.stop()
tests/calcs/hazard/classical/core_test: Test that the site collection is cached during pre-execute.
py
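What the new assertions pin down is a lazy-load-once cache: _site_collection starts out None and is populated the first time it is needed (by pre_execute in the real calculator). A toy version of that pattern, with all names assumed:

class HazardCalculation:
    def __init__(self, site_source):
        self._site_source = site_source
        self._site_collection = None      # nothing loaded yet

    @property
    def site_collection(self):
        if self._site_collection is None:
            # The expensive load happens exactly once.
            self._site_collection = list(self._site_source)
        return self._site_collection

hc = HazardCalculation(site_source=('site-1', 'site-2'))
assert hc._site_collection is None        # before first access
hc.site_collection
assert hc._site_collection is not None    # cached afterwards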
diff --git a/LiSE/LiSE/examples/college.py b/LiSE/LiSE/examples/college.py index <HASH>..<HASH> 100644 --- a/LiSE/LiSE/examples/college.py +++ b/LiSE/LiSE/examples/college.py @@ -212,7 +212,7 @@ def install(eng): student.rule(rule) # Apply these previously written rules to each brain cell for rule in (learn, sober_up, catch_up): - student.node.rule(rule) + student.place.rule(rule) if __name__ == "__main__":
Apply student rules to .place, since .node won't take them anymore
py
diff --git a/torment/decorators.py b/torment/decorators.py index <HASH>..<HASH> 100644 --- a/torment/decorators.py +++ b/torment/decorators.py @@ -49,7 +49,7 @@ def log(prefix = ''): name = function.__self__.__class__.__name__ + '.' + function.__name__ elif len(args): members = dict(inspect.getmembers(args[0], predicate = lambda _: inspect.ismethod(_) and _.__name__ == function.__name__)) - logger.debug('members: %s', members) + logger.debug('members.keys(): %s', members.keys()) if len(members): name, my_args = args[0].__class__.__name__ + '.' + function.__name__, args[1:]
only log members' keys. After seeing this in action, logging the full members dict is too verbose; this cleans it up and keeps it useful in the event it's needed.
py
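To see why the keys are enough: inspect.getmembers returns (name, bound-method) pairs, and the bound-method reprs are what blow up the log line. A small, self-contained comparison:

import inspect
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

class Fixture:
    def run(self):
        pass

members = dict(inspect.getmembers(Fixture(), predicate=inspect.ismethod))
logger.debug('members: %s', members)                # noisy bound-method reprs
logger.debug('members.keys(): %s', members.keys())  # just the names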
diff --git a/lib/basepanel.py b/lib/basepanel.py index <HASH>..<HASH> 100644 --- a/lib/basepanel.py +++ b/lib/basepanel.py @@ -225,6 +225,11 @@ class BasePanel(wx.Panel): self.ForwardEvent(event=event.guiEvent) + def toggle_legend(self, evt=None, show=None): + pass + def toggle_grid(self, evt=None, show=None): + pass + def lassoHandler(self, vertices): try: print 'default lasso handler -- override!'
add stubs for toggle_legend/toggle_grid in basepanel
py
diff --git a/lib/svtplay_dl/fetcher/hls.py b/lib/svtplay_dl/fetcher/hls.py index <HASH>..<HASH> 100644 --- a/lib/svtplay_dl/fetcher/hls.py +++ b/lib/svtplay_dl/fetcher/hls.py @@ -47,7 +47,7 @@ def hlsparse(url): streams = {} for i in files: - bitrate = float(i[1]["BANDWIDTH"])/1024 + bitrate = float(i[1]["BANDWIDTH"])/1000 streams[int(bitrate)] = _get_full_url(i[0], url) return streams
HLS: <I> is the new thing
py
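For the arithmetic behind the change: the BANDWIDTH attribute in an HLS master playlist is given in bits per second, and network bitrates conventionally use SI prefixes, so converting to kilobits per second means dividing by 1000 rather than 1024:

bandwidth = 2048000           # bits per second, as listed in a playlist
print(int(bandwidth / 1000))  # 2048 kbit/s -- SI convention, the new divisor
print(int(bandwidth / 1024))  # 2000 -- the old binary-prefix division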
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -270,7 +270,7 @@ class ConfigCommand(Command): update_extend(extension_extra, config) # We don't need macros for these, since they all must exist. else: - errors.append('Could not find', name, 'with pkg-config.') + errors.append('Could not find ' + name + ' with pkg-config.') # Get the config for either swresample OR avresample. for name in 'libswresample', 'libavresample':
Fix small refactoring error in setup.py
py
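The bug being fixed: list.append takes exactly one argument, so the original call raised a TypeError instead of recording the error message. A runnable illustration (the library name here is made up):

errors = []
try:
    errors.append('Could not find', 'libexample', 'with pkg-config.')
except TypeError as exc:
    print(exc)  # append() takes exactly one argument (3 given)

# The fix: build a single string first, then append it.
errors.append('Could not find ' + 'libexample' + ' with pkg-config.')
print(errors)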
diff --git a/ddmrp/models/stock_warehouse_orderpoint.py b/ddmrp/models/stock_warehouse_orderpoint.py index <HASH>..<HASH> 100644 --- a/ddmrp/models/stock_warehouse_orderpoint.py +++ b/ddmrp/models/stock_warehouse_orderpoint.py @@ -141,7 +141,7 @@ class StockWarehouseOrderpoint(models.Model): "qty_multiple", "product_uom", "procure_uom_id", "product_uom.rounding") def _compute_procure_recommended_qty(self): - subtract_qty = self._quantity_in_progress() + subtract_qty = self.sudo()._quantity_in_progress() for rec in self: procure_recommended_qty = 0.0 if rec.net_flow_position < rec.top_of_yellow:
[<I>][FIX] ddmrp: call _quantity_in_progress with sudo() to avoid ACL errors, as it is an internal ddmrp engine function
py
diff --git a/source/rafcon/mvc/state_machine_helper.py b/source/rafcon/mvc/state_machine_helper.py index <HASH>..<HASH> 100644 --- a/source/rafcon/mvc/state_machine_helper.py +++ b/source/rafcon/mvc/state_machine_helper.py @@ -27,7 +27,7 @@ def delete_model(model, raise_exceptions=False): data from the corresponding state machine. :param model: The model to delete - :param bool raise_exceptions: Whether to raise exceptions or only print error logs in case of failures + :param bool raise_exceptions: Whether to raise exceptions or only log errors in case of failures :return: True if successful, False else """ state_m = model.parent
Satisfy print counter: remove print from block comment
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -27,10 +27,6 @@ setup( install_requires=[ 'bd2k-python-lib==1.13.dev14', 'dill==0.2.5'], - tests_require=[ - 'mock==1.0.1', - 'pytest==2.8.3'], - test_suite='toil', extras_require={ 'mesos': [ 'psutil==3.0.1'],
Fix merge error introduced by <I>bf<I>e2f7f<I>d0baf<I>cb<I>dc<I>e<I>b
py
diff --git a/fc/stats.py b/fc/stats.py index <HASH>..<HASH> 100644 --- a/fc/stats.py +++ b/fc/stats.py @@ -110,14 +110,3 @@ def RCV(data, channel): q75, q25 = numpy.percentile(data[:,channel], [75 ,25]) return (q75 - q25)/numpy.median(data[:,channel]) - -def rate(data, channel='Time'): - ''' Calculate the flow rate of events. - - data - NxD FCSData object or numpy array - channel - Channel in which to calculate the statistic - ''' - if hasattr(channel, '__iter__'): - raise ValueError("Channel should be a scalar.") - - return float(len(data[:,channel]))/(data[-1,channel]-data[0,channel]) \ No newline at end of file
Eliminated rate from stats.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -43,7 +43,7 @@ sys.dont_write_bytecode = True setup( name='pipe2py', - version='0.23.1', + version='0.23.2', description=( 'A project to compile Yahoo! Pipes into Python. ' 'The pipe2py package can compile a Yahoo! Pipe into pure Python source'
Bump to version <I>
py
diff --git a/shoebot/data/typography.py b/shoebot/data/typography.py index <HASH>..<HASH> 100644 --- a/shoebot/data/typography.py +++ b/shoebot/data/typography.py @@ -70,20 +70,12 @@ class Text(Grob, ColorMixin): # then we set fontsize (multiplied by pango.SCALE) self._fontface.set_absolute_size(self._fontsize*pango.SCALE) -<<<<<<< HEAD - # missing styles? - if kwargs.has_key("style"): - if "italic" in kwargs["style"] or "oblique" in kwargs["style"]: - self._style = pango.STYLE_ITALIC - self._fontface.set_style(self._style) -======= # the style self._style = pango.STYLE_NORMAL if kwargs.has_key("style"): if kwargs["style"]=="italic" or kwargs["style"]=="oblique": self._style = pango.STYLE_ITALIC self._fontface.set_style(self._style) ->>>>>>> 0abd38f255ed7da6168e7728ba38938de0402342 #we need to pre-render some stuff to enable metrics sizing self._pre_render()
Merge changes to support default style for text
py
diff --git a/tests/basics/gc1.py b/tests/basics/gc1.py index <HASH>..<HASH> 100644 --- a/tests/basics/gc1.py +++ b/tests/basics/gc1.py @@ -27,3 +27,8 @@ if hasattr(gc, 'threshold'): assert(gc.threshold() == 0) assert(gc.threshold(-1) is None) assert(gc.threshold() == -1) + + # Setting a low threshold should trigger collection at the list alloc + gc.threshold(1) + [[], []] + gc.threshold(-1)
tests/basics/gc1: Add test which triggers GC threshold.
py
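The test relies on gc.threshold(), a MicroPython-specific API: setting a threshold of 1 makes the following container allocations trigger a collection pass. CPython's closest analogue is gc.set_threshold(); a rough sketch of the same idea there, under that substitution:

import gc

default = gc.get_threshold()
gc.set_threshold(1)         # collect very eagerly from now on
[[], []]                    # container allocations that can now trigger a pass
gc.set_threshold(*default)  # restore the defaults (typically (700, 10, 10))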
diff --git a/tests/shared.py b/tests/shared.py index <HASH>..<HASH> 100644 --- a/tests/shared.py +++ b/tests/shared.py @@ -52,12 +52,13 @@ def read_dir(startdir, excludes=()): assert isinstance(excludes, list) or isinstance(excludes, tuple), \ "excludes must be a list or a tuple, not " + repr(type(excludes)) startdir = Path(startdir) + exclude_tuples = [Path(e).parts for e in excludes] contents = {} for p in startdir.glob('**/*'): if not p.is_file(): continue relpath = p.relative_to(startdir) - if any(str(relpath).startswith(str(e)) for e in excludes): + if any(relpath.parts[:len(tup)] == tup for tup in exclude_tuples): continue with p.open() as f: try:
use Path in read_dir to handle Windows properly. We were running into trouble with tests that passed in an exclude like "a/b", with forward slashes. Running everything through Path handles that.
py
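The substance of the fix is comparing path components as tuples instead of string prefixes. String matching breaks in two ways — shown below on POSIX, and on Windows str(relpath) uses backslashes, so an exclude written with forward slashes never matched at all:

from pathlib import Path

excludes = ['a/b']
exclude_tuples = [Path(e).parts for e in excludes]

relpath = Path('a/bc/file.txt')
# String prefixes wrongly exclude 'a/bc/...' because 'a/bc' starts with 'a/b':
print(str(relpath).startswith('a/b'))                            # True
# Component-wise comparison only matches whole path segments:
print(any(relpath.parts[:len(t)] == t for t in exclude_tuples))  # False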
diff --git a/rootpy/io/file.py b/rootpy/io/file.py index <HASH>..<HASH> 100644 --- a/rootpy/io/file.py +++ b/rootpy/io/file.py @@ -229,7 +229,7 @@ class _DirectoryBase(Object): """ cd to the gDirectory before this file was open. """ - if isinstance(self._prev_dir, ROOT.TROOT): + if self._prev_dir is None or isinstance(self._prev_dir, ROOT.TROOT): return False if isinstance(self._prev_dir, ROOT.TFile): if self._prev_dir.IsOpen() and self._prev_dir.IsWritable(): @@ -642,6 +642,7 @@ class _FileBase(_DirectoryBase): def __init__(self, name, *args, **kwargs): # trigger finalSetup ROOT.R.kTRUE + # grab previous directory before creating self self._prev_dir = ROOT.gDirectory.func() super(_FileBase, self).__init__(name, *args, **kwargs) self._post_init() @@ -649,6 +650,8 @@ class _FileBase(_DirectoryBase): def _post_init(self): self._path = self.GetName() self._parent = self + # need to set _prev_dir here again if using rootpy.ROOT.TFile + self._prev_dir = getattr(self, '_prev_dir', None) self._inited = True def _populate_cache(self):
fix #<I>: File needs to set _prev_dir in post_init
py
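The fix boils down to the getattr-with-default idiom: _post_init must tolerate construction paths that never ran the __init__ which sets _prev_dir. A stripped-down sketch, with the class name assumed:

class FileBase:
    def _post_init(self):
        # Keep _prev_dir if __init__ already set it; default to None
        # otherwise (e.g. when constructed via rootpy.ROOT.TFile).
        self._prev_dir = getattr(self, '_prev_dir', None)

f = FileBase.__new__(FileBase)  # simulate a path that skipped __init__
f._post_init()
print(f._prev_dir)              # None, instead of an AttributeError later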
diff --git a/presser/exceptions.py b/presser/exceptions.py index <HASH>..<HASH> 100644 --- a/presser/exceptions.py +++ b/presser/exceptions.py @@ -1,15 +1,18 @@ -class PresserJavaScriptParseError(Exception): +class PresserError(Exception): + pass + +class PresserJavaScriptParseError(PresserError): pass -class PresserInvalidVineIdError(Exception): +class PresserInvalidVineIdError(PresserError): pass -class PresserURLError(Exception): +class PresserURLError(PresserError): pass -class Presser404Error(Exception): +class Presser404Error(PresserError): pass -class PresserRequestError(Exception): +class PresserRequestError(PresserError): pass \ No newline at end of file
exceptions now inherit from PresserError
py
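This is the standard library-exception-hierarchy pattern: give a package one root exception so callers can catch everything it raises with a single except clause while still distinguishing subtypes. A minimal demonstration:

class PresserError(Exception):
    pass

class PresserURLError(PresserError):
    pass

try:
    raise PresserURLError('could not reach the page')
except PresserError as exc:         # one handler for the whole package
    print(type(exc).__name__, exc)  # PresserURLError could not reach the page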
diff --git a/saltcloud/clouds/softlayer-hw.py b/saltcloud/clouds/softlayer-hw.py index <HASH>..<HASH> 100644 --- a/saltcloud/clouds/softlayer-hw.py +++ b/saltcloud/clouds/softlayer-hw.py @@ -671,10 +671,17 @@ def destroy(name, call=None): {'name': name}, ) - ret = {} node = show_instance(name, call='action') - conn = get_conn() - response = conn.deleteObject(id=node['id']) + conn = get_conn(service='SoftLayer_Ticket') + response = conn.createCancelServerTicket( + { + 'id': node['id'], + 'reason': 'Salt Cloud Hardware Server Cancelation', + 'content': 'Please cancel this server', + 'cancelAssociatedItems': True, + 'attachmentType': 'HARDWARE', + } + ) saltcloud.utils.fire_event( 'event',
Change destroy() to the correct cancellation usage for hardware instances on SoftLayer
py