diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -1,13 +1,10 @@
-from setuptools import setup
+from setuptools import find_packages
 
-setup(
+setup_params = dict(
     name="pmxbot",
     version="1100b6",
-    packages=["pmxbot", "pmxbotweb", "pmxbot.popquotes"],
-    package_data={
-        'pmxbot' : ["popquotes.sqlite",],
-        'pmxbotweb' : ["templates/*.html", "templates/pmxbot.png",],
-    },
+    packages=find_packages(),
+    include_package_data=True,
     entry_points=dict(
         console_scripts = [
             'pmxbot=pmxbot.pmxbot:run',
@@ -43,4 +40,11 @@ setup(
         'Topic :: Internet :: WWW/HTTP :: WSGI :: Application',
     ],
     long_description = open('README').read(),
+    setup_requires=[
+        'hgtools',
+    ],
 )
+
+if __name__ == '__main__':
+    from setuptools import setup
+    setup(**setup_params)
Updated setup script to find packages automatically. Fixes issue where popquotes.sqlite was not being included.
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -26,7 +26,7 @@ setup(
     packages=['sanic'],
     platforms='any',
     install_requires=[
-        'uvloop>=0.5.3',
+        'uvloop>=0.5.3;platform_system!="Windows"',
         'httptools>=0.0.9',
         'ujson>=1.35',
         'aiofiles>=0.3.0',
Don't ask for uvloop on Windows

This is a tricky issue, but essentially uvloop is unavailable on Windows. This means that for Windows users, we have to install Sanic with no requirements and then manually specify all requirements apart from uvloop. However, Sanic will work with the standard asyncio event loop, so I propose we remove the uvloop requirement on Windows. This patch doesn't touch any demo imports.
py
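A hedged sketch of the mechanism the row above relies on: PEP 508 environment markers are evaluated at install time, so the marked requirement simply drops out on Windows. Project name and version pins here are illustrative, not taken from the commit.

# setup.py sketch -- PEP 508 environment markers in install_requires.
# 'example-app' and the pinned versions are hypothetical.
from setuptools import setup

setup(
    name='example-app',
    version='0.1.0',
    install_requires=[
        # Installed everywhere except Windows; pip/setuptools evaluate
        # the marker against the target platform at install time.
        'uvloop>=0.5.3; platform_system != "Windows"',
        # Unconditional requirement, installed on every platform.
        'httptools>=0.0.9',
    ],
)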
diff --git a/testsuite/test_provider.py b/testsuite/test_provider.py
index <HASH>..<HASH> 100644
--- a/testsuite/test_provider.py
+++ b/testsuite/test_provider.py
@@ -519,6 +519,7 @@ class OAuth2ProviderTestCase(InvenioTestCase):
         # Access token is not valid for this scope
         r = self.client.get(
             '/oauth/info/',
+            query_string="access_token=%s" % self.personal_token.access_token,
             base_url=cfg['CFG_SITE_SECURE_URL']
         )
         self.assertStatus(r, 401)
oauth2server: missing access token in test case

* Fixes missing access token in test case. (closes #<I>)
py
diff --git a/glances/plugins/glances_processlist.py b/glances/plugins/glances_processlist.py
index <HASH>..<HASH> 100644
--- a/glances/plugins/glances_processlist.py
+++ b/glances/plugins/glances_processlist.py
@@ -44,14 +44,6 @@ def convert_timedelta(delta):
 
 def split_cmdline(cmdline):
     """Return path, cmd and arguments for a process cmdline."""
-    # There is an issue in psutil for Electron/Atom processes (maybe others...)
-    # Tracked by https://github.com/nicolargo/glances/issues/1192
-    # https://github.com/giampaolo/psutil/issues/1179
-    # Add this dirty workarround (to be removed when the psutil is solved)
-    if len(cmdline) == 1:
-        cmdline = shlex.split(cmdline[0])
-    # /End of the direty workarround
-
     path, cmd = os.path.split(cmdline[0])
     arguments = ' '.join(cmdline[1:])
     return path, cmd, arguments
Remove dirty workaround for issue #<I>, because psutil issue #<I> is now solved in psutil <I> and higher
py
diff --git a/django_mailbox/models.py b/django_mailbox/models.py
index <HASH>..<HASH> 100644
--- a/django_mailbox/models.py
+++ b/django_mailbox/models.py
@@ -724,7 +724,10 @@ class MessageAttachment(models.Model):
         """Returns the original filename of this attachment."""
         file_name = self._get_rehydrated_headers().get_filename()
         if isinstance(file_name, six.text_type):
-            return utils.convert_header_to_unicode(file_name)
+            result = utils.convert_header_to_unicode(file_name)
+            if result is None:
+                return file_name
+            return result
         else:
             return None
Return the unicode-converted filename only if the conversion returned a result.
py
diff --git a/hydpy/core/modeltools.py b/hydpy/core/modeltools.py
index <HASH>..<HASH> 100644
--- a/hydpy/core/modeltools.py
+++ b/hydpy/core/modeltools.py
@@ -8,8 +8,7 @@
 from hydpy import pub
 from hydpy.core import objecttools
 
-class MetaModel(type):
-
+class MetaModelType(type):
     def __new__(cls, name, parents, dict_):
         methods = dict_.get('_METHODS')
         if methods is None:
@@ -33,12 +32,12 @@ class MetaModel(type):
         dict_[shortname] = method
         return type.__new__(cls, name, parents, dict_)
 
+MetaModelClass = MetaModelType('MetaModelClass', (), {'_METHODS': ()})
 
-class Model(object):
-    """Base class for hydrological models."""
+class Model(MetaModelClass):
+    """Base class for all hydrological models."""
 
-    __metaclass__ = MetaModel
-    _METHODS = ()
+    _METHODS = (MetaModelClass)
 
     def __init__(self):
         self.element = None
make the Model metaclass approach Python 3 compatible
py
diff --git a/stagpy/field.py b/stagpy/field.py
index <HASH>..<HASH> 100644
--- a/stagpy/field.py
+++ b/stagpy/field.py
@@ -151,9 +151,9 @@ def plot_scalar(step, var, scaling=None, **extra):
                      rotation='horizontal', va='center')
     if step.geom.spherical or conf.plot.ratio is None:
         plt.axis('equal')
+        plt.axis('off')
     else:
         axis.set_aspect(conf.plot.ratio / axis.get_data_ratio())
-    plt.axis('off')
     axis.set_adjustable('box')
     axis.set_xlim(xmesh.min(), xmesh.max())
     axis.set_ylim(ymesh.min(), ymesh.max())
Annotate axes when ratio is set
py
diff --git a/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py b/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
index <HASH>..<HASH> 100644
--- a/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
+++ b/superset/migrations/versions/18532d70ab98_fix_table_unique_constraint_in_mysql.py
@@ -17,14 +17,14 @@
 """Delete table_name unique constraint in mysql
 
 Revision ID: 18532d70ab98
-Revises: e5ef6828ac4e
+Revises: 3fbbc6e8d654
 Create Date: 2020-09-25 10:56:13.711182
 
 """
 
 # revision identifiers, used by Alembic.
 revision = "18532d70ab98"
-down_revision = "e5ef6828ac4e"
+down_revision = "3fbbc6e8d654"
 
 from alembic import op
Fix alembic migration (#<I>)
py
diff --git a/fedmsg/commands/hub.py b/fedmsg/commands/hub.py
index <HASH>..<HASH> 100644
--- a/fedmsg/commands/hub.py
+++ b/fedmsg/commands/hub.py
@@ -1,12 +1,30 @@
 import fedmsg
 from fedmsg.commands import command
 
-extra_args = []
+extra_args = [
+    ([], {
+        'dest': 'name',
+        'metavar': "NAME",
+        'help': "Name of the publishing endpoint to use.",
+    }),
+]
 
 
 @command(extra_args=extra_args)
 def hub(**kw):
     """ Run the fedmsg hub. """
-    from moksha.hub.hub import main
+    if kw['name'] not in kw['endpoints']:
+        raise ValueError("NAME must be one of %r" % kw['endpoints'].keys())
+
+    # Rephrase the fedmsg-config.py config as moksha *.ini format.
+    moksha_options = dict(
+        zmq_enabled=True,
+        zmq_publish_endpoints=kw['endpoints'][kw['name']],
+        zmq_subscribe_endpoints=','.join(kw['endpoints'].values()),
+        zmq_strict=True,
+    )
+    kw.update(moksha_options)
+
+    from moksha.hub import main
     main(options=kw)
Attempt to wrap moksha-hub with fedmsg-hub.
py
diff --git a/tcex/app_config_object/templates.py b/tcex/app_config_object/templates.py
index <HASH>..<HASH> 100644
--- a/tcex/app_config_object/templates.py
+++ b/tcex/app_config_object/templates.py
@@ -313,8 +313,14 @@ class DownloadTemplates(TemplateBase):
             overwrite='prompt',
             default_choice='no',
         )
+        self.download_file(url=f'{self.url}/app_lib.py', destination='app_lib.py', overwrite=True)
+        self.download_file(
+            f'{self.url}/app_lib.py',
+            destination='app_lib.py',
+            overwrite='prompt',
+            default_choice='no',
+        )
         if template and not template.startswith('external'):
-            self.download_file(f'{self.url}/app_lib.py', destination='app_lib.py', overwrite=True)
             self.download_file(
                 f'{self.url}/{template}/args.py',
                 destination='args.py',
Updated templates.py to include app_lib.py for external_ingress template
py
diff --git a/seed_control_interface/__init__.py b/seed_control_interface/__init__.py
index <HASH>..<HASH> 100644
--- a/seed_control_interface/__init__.py
+++ b/seed_control_interface/__init__.py
@@ -1,2 +1,2 @@
-__version__ = '0.9.6'
+__version__ = '0.9.7'
 VERSION = __version__
Bumped version to <I>
py
diff --git a/lib/bibformat_engine.py b/lib/bibformat_engine.py
index <HASH>..<HASH> 100644
--- a/lib/bibformat_engine.py
+++ b/lib/bibformat_engine.py
@@ -488,7 +488,8 @@ def eval_format_element(format_element, bfo, parameters={}, verbose=0):
     suffix = parameters.get('suffix', "")
     default_value = parameters.get('default', "")
     escape = parameters.get('escape', "")
-
+    output_text = ''
+
     # 3 possible cases:
     # a) format element file is found: we execute it
     # b) format element file is not found, but exist in tag table (e.g. bfe_isbn)
Fixed an uninitialized variable in case of exception.
py
diff --git a/juju/model.py b/juju/model.py
index <HASH>..<HASH> 100644
--- a/juju/model.py
+++ b/juju/model.py
@@ -2402,9 +2402,11 @@ class Model:
         @param offer_name: over ride the offer name to help the consumer
         """
         controller = await self.get_controller()
-        return await controller.create_offer(self.info.uuid, endpoint,
-                                             offer_name=offer_name,
-                                             application_name=application_name)
+        offer_result = await controller.create_offer(self.info.uuid, endpoint,
+                                                     offer_name=offer_name,
+                                                     application_name=application_name)
+        await controller.disconnect()
+        return offer_result
 
     async def list_offers(self):
         """
Close the connection spawned for create_offer after use
py
diff --git a/update_version_from_git.py b/update_version_from_git.py
index <HASH>..<HASH> 100644
--- a/update_version_from_git.py
+++ b/update_version_from_git.py
@@ -76,7 +76,7 @@ def prerelease_version():
     assert len(initpy_ver.split('.')) in [3, 4], 'moto/__init__.py version should be like 0.0.2 or 0.0.2.dev'
     assert initpy_ver > ver, 'the moto/__init__.py version should be newer than the last tagged release.'
 
-    return f'{initpy_ver}.dev{commits_since}.{githash}'
+    return f'{initpy_ver}.dev{commits_since}'
 
 def read(*parts):
     """ Reads in file from *parts.
Simplify version numbers for dev to make pypi happy.
py
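A note on why PyPI is happier with the shorter form above: PEP 440 allows a numeric ".devN" suffix, but an extra dotted hash segment is not a valid public version. A hedged sketch using the packaging library (the reference PEP 440 implementation; the sample version strings are illustrative):

# PEP 440 check: '0.0.2.dev3' parses, '0.0.2.dev3.abc1234' does not.
from packaging.version import Version, InvalidVersion

for candidate in ['0.0.2.dev3', '0.0.2.dev3.abc1234']:
    try:
        print(candidate, '->', Version(candidate))   # valid public version
    except InvalidVersion:
        print(candidate, '-> rejected by PEP 440')   # hash segment is invalid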
diff --git a/alot/db/utils.py b/alot/db/utils.py
index <HASH>..<HASH> 100644
--- a/alot/db/utils.py
+++ b/alot/db/utils.py
@@ -55,6 +55,7 @@ def add_signature_headers(mail, sigs, error_msg):
         sig_from = key.uids[0].uid
     except:
         sig_from = sigs[0].fpr
+        uid_trusted = False
 
     mail.add_header(
         X_SIGNATURE_VALID_HEADER,
Define local var in all code paths.
py
diff --git a/dbt/adapters/bigquery/impl.py b/dbt/adapters/bigquery/impl.py
index <HASH>..<HASH> 100644
--- a/dbt/adapters/bigquery/impl.py
+++ b/dbt/adapters/bigquery/impl.py
@@ -495,7 +495,7 @@ class BigQueryAdapter(PostgresAdapter):
         client = conn.handle
 
         with cls.exception_handler(config, 'list dataset', model_name):
-            all_datasets = client.list_datasets()
+            all_datasets = client.list_datasets(include_all=True)
             return [ds.dataset_id for ds in all_datasets]
 
     @classmethod
@@ -535,9 +535,10 @@ class BigQueryAdapter(PostgresAdapter):
     @classmethod
     def check_schema_exists(cls, config, schema, model_name=None):
         conn = cls.get_connection(config, model_name)
+        client = conn.handle
 
         with cls.exception_handler(config, 'get dataset', model_name):
-            all_datasets = conn.handle.list_datasets()
+            all_datasets = client.list_datasets(include_all=True)
             return any([ds.dataset_id == schema for ds in all_datasets])
 
     @classmethod
Include datasets with underscores when listing BigQuery datasets
py
diff --git a/tests/test_stereonet_math.py b/tests/test_stereonet_math.py
index <HASH>..<HASH> 100644
--- a/tests/test_stereonet_math.py
+++ b/tests/test_stereonet_math.py
@@ -210,10 +210,10 @@ class TestAngularDistance:
     def test_directional(self):
         first, second = smath.line(30, 270), smath.line(40, 90)
         dist = smath.angular_distance(first, second, bidirectional=True)
-        assert np.allclose(dist, 70)
+        assert np.allclose(dist, np.radians(70))
         dist = smath.angular_distance(first, second, bidirectional=False)
-        assert np.allclose(dist, 110)
+        assert np.allclose(dist, np.radians(110))
 
 
 def compare_lonlat(lon1, lat1, lon2, lat2):
     """Avoid ambiguities in strike/dip or lon/lat conventions."""
angular_distance returns distances in radians, not degrees
py
diff --git a/acceptancetests/assess_upgrade_series.py b/acceptancetests/assess_upgrade_series.py
index <HASH>..<HASH> 100755
--- a/acceptancetests/assess_upgrade_series.py
+++ b/acceptancetests/assess_upgrade_series.py
@@ -39,7 +39,6 @@ def assess_juju_upgrade_series(client, args):
     reboot_machine(client, target_machine)
     upgrade_series_complete(client, target_machine)
     assert_correct_series(client, target_machine, args.to_series)
-    set_application_series(client, "dummy-subordinate", args.to_series)
 
 
 def upgrade_series_prepare(client, machine, series, **flags):
@@ -72,10 +71,6 @@ def reboot_machine(client, machine):
     log.info("wait_for_started()")
     client.wait_for_started()
 
-def set_application_series(client, application, series):
-    args = (application, series)
-    client.juju('set-series', args)
-
 
 def assert_correct_series(client, machine, expected):
     """Verify that juju knows the correct series for the machine"""
Removes set_application_series command from assess_upgrade_series test.
py
diff --git a/jira/client.py b/jira/client.py
index <HASH>..<HASH> 100644
--- a/jira/client.py
+++ b/jira/client.py
@@ -273,11 +273,12 @@ class JIRA(object):
 
     def dashboards(self, filter=None, startAt=0, maxResults=20):
         """
-        Return a list of Dashboard resources.
+        Return a ResultList of Dashboard resources and a ``total`` count.
 
         :param filter: either "favourite" or "my", the type of dashboards to return
         :param startAt: index of the first dashboard to return
-        :param maxResults: maximum number of dashboards to return
+        :param maxResults: maximum number of dashboards to return. The total number of
+            results is always available in the ``total`` attribute of the returned ResultList.
         """
         params = {}
         if filter is not None:
@@ -1001,11 +1002,12 @@ class JIRA(object):
 
     def search_issues(self, jql_str, startAt=0, maxResults=50, fields=None, expand=None):
         """
-        Get a list of issue Resources matching a JQL search string.
+        Get a ResultList of issue Resources matching a JQL search string.
 
         :param jql_str: the JQL search string to use
        :param startAt: index of the first issue to return
-        :param maxResults: maximum number of issues to return
+        :param maxResults: maximum number of issues to return. Total number of results
+            is available in the ``total`` attribute of the returned ResultList.
         :param fields: comma-separated string of issue fields to include in the results
         :param expand: extra information to fetch inside each resource
         """
Update docs with the new ResultList return type
py
diff --git a/dragnet/model_training.py b/dragnet/model_training.py
index <HASH>..<HASH> 100644
--- a/dragnet/model_training.py
+++ b/dragnet/model_training.py
@@ -198,7 +198,7 @@ def accuracy_auc(y, ypred, weights=None):
 
 def evaluate_models_tokens(datadir, dragnet_model, figname_root=None,
-        tokenizer=simple_tokenizer):
+        tokenizer=simple_tokenizer, cetr=False):
     """
     Evaluate a trained model on the token level.
@@ -214,7 +214,7 @@ def evaluate_models_tokens(datadir, dragnet_model, figname_root=None,
     gold_standard_tokens = {}
 
     for fname, froot in all_files:
-        tokens = tokenizer(' '.join(read_gold_standard(datadir, froot)))
+        tokens = tokenizer(' '.join(read_gold_standard(datadir, froot, cetr)))
         if len(tokens) > 0:
             gold_standard_tokens[froot] = tokens
Pass in cetr flag to read gold standard
py
diff --git a/spyder/api/panel.py b/spyder/api/panel.py
index <HASH>..<HASH> 100644
--- a/spyder/api/panel.py
+++ b/spyder/api/panel.py
@@ -133,8 +133,7 @@ class Panel(QWidget, EditorExtension):
             painter = QPainter(self)
             painter.fillRect(event.rect(), self._background_brush)
         else:
-            raise NotImplementedError(
-                f'paintEvent method must be defined in {self}')
+            logger.debug(f'paintEvent method must be defined in {self}')
 
     def sizeHint(self):
         """
Editor: Prevent error when switching layouts
py
diff --git a/src/rinoh/image.py b/src/rinoh/image.py
index <HASH>..<HASH> 100644
--- a/src/rinoh/image.py
+++ b/src/rinoh/image.py
@@ -72,7 +72,7 @@ class Filename(str):
     """str subclass that provides system-independent path comparison"""
 
     def __eq__(self, other):
-        return posix_path(self) == posix_path(other)
+        return posix_path(str(self)) == posix_path(str(other))
 
     def __ne__(self, other):
         return not (self == other)
Fix infinite recursion in Filename.__eq__
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -38,9 +38,9 @@ setup(
     zip_safe=False,
     include_package_data=True,
     install_requires=[
-        'virtualchain==0.0.8',
+        'virtualchain==0.0.9',
         'keychain==0.1.4',
-        'blockstack-client==0.0.12.5',
+        'blockstack-client==0.0.13',
         'defusedxml>=0.4.1'
     ],
     classifiers=[
require latest virtualchain and blockstack client
py
diff --git a/salt/grains/core.py b/salt/grains/core.py
index <HASH>..<HASH> 100644
--- a/salt/grains/core.py
+++ b/salt/grains/core.py
@@ -142,6 +142,25 @@ def _virtual(osdata):
     # Provides:
     #   virtual
     grains = {'virtual': 'physical'}
+    try:
+        if not subprocess.check_call( "lspci", stdout = subprocess.PIPE, stderr = subprocess.PIPE ):
+            isLspciPresent = True
+        else:
+            isLspciPresent = False
+    except:
+        isLspciPresent = False
+    if isLspciPresent:
+        model = subprocess.Popen(
+            'lspci|grep -i system',
+            shell = True,
+            stdout = subprocess.PIPE
+        ).communicate()[0]
+        if model.lower().count( "vmware" ):
+            grains['virtual'] = "VMware"
+        elif model.lower().count( "virtualbox" ):
+            grains['virtual'] = "VirtualBox"
+        elif model.lower().count( "qemu" ):
+            grains['virtual'] = 'kvm'
     choices = ['Linux', 'OpenBSD', 'SunOS', 'HP-UX']
     isdir = os.path.isdir
     if osdata['kernel'] in choices:
Adding VirtualBox and VMware support for Linux. Should work for other OSes.
py
diff --git a/MAVProxy/modules/mavproxy_map/mp_slipmap_ui.py b/MAVProxy/modules/mavproxy_map/mp_slipmap_ui.py
index <HASH>..<HASH> 100644
--- a/MAVProxy/modules/mavproxy_map/mp_slipmap_ui.py
+++ b/MAVProxy/modules/mavproxy_map/mp_slipmap_ui.py
@@ -577,13 +577,12 @@ class MPSlipMapPanel(wx.Panel):
         c = event.GetUniChar()
         if c == ord('+') or (c == ord('=') and event.ShiftDown()):
             self.change_zoom(1.0/1.2)
-            event.Skip()
         elif c == ord('-'):
             self.change_zoom(1.2)
-            event.Skip()
-        elif c == ord('G'):
+        elif c == ord('G') and not event.ControlDown():
             self.enter_position()
-            event.Skip()
         elif c == ord('C'):
             self.clear_thumbnails()
+        else:
+            # propogate event:
+            event.Skip()
mp_slipmap_ui: propagate events appropriately

.Skip() means to skip *this* handler, not further handling
py
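A hedged sketch of the wx event-propagation rule the message above describes: event.Skip() does not skip the current handler; it tells wxWidgets to keep searching for further handlers after this one returns. Handled keys should therefore swallow the event, and only the unhandled branch should call Skip(). Class and handler names here are illustrative.

import wx

class KeyPanel(wx.Panel):
    """Toy panel mirroring the corrected pattern from the diff above."""
    def __init__(self, parent):
        super().__init__(parent)
        self.Bind(wx.EVT_CHAR, self.on_char)

    def on_char(self, event):
        c = event.GetUniChar()
        if c == ord('+'):
            print('zoom in')        # handled: swallow the event, no Skip()
        elif c == ord('-'):
            print('zoom out')       # handled: swallow the event, no Skip()
        else:
            event.Skip()            # unhandled: let wx propagate it further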
diff --git a/aiopvapi/resources/shade.py b/aiopvapi/resources/shade.py
index <HASH>..<HASH> 100644
--- a/aiopvapi/resources/shade.py
+++ b/aiopvapi/resources/shade.py
@@ -107,6 +107,10 @@ class BaseShade(ApiResource):
         """Jog the shade."""
         await self.request.put(self._resource_path, {"shade": {"motion": "jog"}})
 
+    async def calibrate(self):
+        """Calibrate the shade."""
+        await self.request.put(self._resource_path, {"shade": {"motion": "calibrate"}})
+
     async def stop(self):
         """Stop the shade."""
         return await self.request.put(self._resource_path, {"shade": {"motion": "stop"}})
@@ -130,6 +134,12 @@ class BaseShade(ApiResource):
 
         self._raw_data = raw_data[ATTR_SHADE]
 
+    async def refreshBattery(self):
+        """Query the hub and request the most recent battery state."""
+        raw_data = await self.request.get(self._resource_path, {"updateBatteryLevel": "true"})
+
+        self._raw_data = raw_data[ATTR_SHADE]
+
     async def get_current_position(self, refresh=True) -> dict:
         """Return the current shade position.
Add calibrate and refreshBattery endpoints
py
diff --git a/colin/checks/labels.py b/colin/checks/labels.py
index <HASH>..<HASH> 100644
--- a/colin/checks/labels.py
+++ b/colin/checks/labels.py
@@ -188,7 +188,7 @@ class IoK8sDisplayNameLabelCheck(LabelAbstractCheck):
                          " of an image inside the Image / Repo Overview page.",
             reference_url="https://fedoraproject.org/wiki/Container:Guidelines#LABELS",
             tags=["io.k8s.display-name", "label"],
-            labels="io.k8s.display-name",
+            labels=["io.k8s.display-name"],
             required=True,
             value_regex=None)
Correct the 'labels' attribute of the check
py
diff --git a/holoviews/plotting/bokeh/element.py b/holoviews/plotting/bokeh/element.py
index <HASH>..<HASH> 100644
--- a/holoviews/plotting/bokeh/element.py
+++ b/holoviews/plotting/bokeh/element.py
@@ -845,6 +845,6 @@ class OverlayPlot(GenericOverlayPlot, ElementPlot):
                            "were not initialized correctly and could not be "
                            "rendered.")
 
-        if not self.overlaid and not self.tabs and not self.batched:
+        if element and not self.overlaid and not self.tabs and not self.batched:
             self._update_ranges(element, ranges)
             self._update_plot(key, self.handles['plot'], element)
Ensure that empty overlay frame does not break
py
diff --git a/lib/stsci/tools/stpyfits.py b/lib/stsci/tools/stpyfits.py
index <HASH>..<HASH> 100644
--- a/lib/stsci/tools/stpyfits.py
+++ b/lib/stsci/tools/stpyfits.py
@@ -142,6 +142,7 @@ class _ConstantValueImageBaseHDU(pyfits.hdu.image._ImageBaseHDU):
         super(_ConstantValueImageBaseHDU, self).__init__(
                 data, header, do_not_scale_image_data, uint)
 
+    @property
     def size(self):
         """
         The HDU's size should always come up as zero so long as there's no
@@ -151,7 +152,7 @@ class _ConstantValueImageBaseHDU(pyfits.hdu.image._ImageBaseHDU):
         if 'PIXVALUE' in self._header:
             return 0
         else:
-            return super(_ConstantValueImageBaseHDU, self).size()
+            return super(_ConstantValueImageBaseHDU, self).size
 
     @pyfits.util.lazyproperty
The .size() method on HDUs is a property now, so update to reflect that so this doesn't break with the PyFITS changes from trunk

git-svn-id: <URL>
py
diff --git a/djangocms_spa/views.py b/djangocms_spa/views.py
index <HASH>..<HASH> 100644
--- a/djangocms_spa/views.py
+++ b/djangocms_spa/views.py
@@ -85,11 +85,17 @@ class SpaApiView(APIView):
     @cache_view
     def dispatch(self, request, **kwargs):
         # Take the language from the URL kwarg and set it as request language
-        language_code = kwargs.pop('language_code')
-        available_languages = [language[0] for language in settings.LANGUAGES]
-        request.LANGUAGE_CODE = language_code if language_code in available_languages else settings.LANGUAGES[0][0]
+        self.set_language(kwargs, request)
         return super(SpaApiView, self).dispatch(request, **kwargs)
-
+
+    def set_language(self, kwargs, request):
+        if hasattr(request, "LANGUAGE_CODE"):
+            language_code = request.LANGUAGE_CODE
+        else:
+            language_code = kwargs.pop('language_code')
+        available_languages = {language[0] for language in settings.LANGUAGES}
+        request.LANGUAGE_CODE = language_code if language_code in available_languages else settings.LANGUAGES[0][0]
+
     def get(self, *args, **kwargs):
         data = {
             'data': self.get_fetched_data()
[language_activation] Try using the request language rather than the one from the URL
py
diff --git a/krypy/utils.py b/krypy/utils.py
index <HASH>..<HASH> 100644
--- a/krypy/utils.py
+++ b/krypy/utils.py
@@ -455,6 +455,12 @@ def arnoldi(A, v, maxiter=None, ortho='mgs', inner_product=ip):
     Computes V and H such that :math:`AV_{n}=V_{n+1}\\underline{H}_n`
 
     :param ortho: may be 'mgs' (Modified Gram-Schmidt), 'dmgs' (double Modified Gram-Schmidt), 'house' (Householder)
+
+    If the Householder orthogonalization is used, the inner product has to be
+    the Euclidean inner product. It's unclear to me (andrenarchy), how a
+    variant of the Householder QR algorithm can be used with a non-Euclidean
+    inner product. Compare
+    http://math.stackexchange.com/questions/433644/is-householder-orthogonalization-qr-practicable-for-non-euclidean-inner-products
     """
     dtype = find_common_dtype(A, v)
     N = v.shape[0]
add comment on Householder + non-Euclidean IP
py
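The restriction recorded in that docstring has a one-line algebraic reason, sketched here in standard notation (not from the commit): a Householder reflector is unitary only for the Euclidean inner product.

% LaTeX sketch: a reflector built from a nonzero vector v,
%   H = I - 2 (v v^*) / (v^* v),
% satisfies H^* H = I w.r.t. the Euclidean inner product <x, y> = y^* x.
% For a B-inner product <x, y>_B = y^* B x (B Hermitian positive definite),
% the same H is in general not B-unitary, which is why the 'house' option
% above is limited to the Euclidean case.
\[ H = I - 2\,\frac{v v^{*}}{v^{*} v}, \qquad H^{*} H = I. \]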
diff --git a/serfclient/client.py b/serfclient/client.py
index <HASH>..<HASH> 100644
--- a/serfclient/client.py
+++ b/serfclient/client.py
@@ -3,6 +3,7 @@ try:
 except ImportError:
     from connection import SerfConnection
 
+
 class SerfClient(object):
     def __init__(self, host='localhost', port=7373, timeout=3):
         self.host = host
PEP 8: blank lines fix

serfclient/client.py:6:1: E<I> expected 2 blank lines, found 1
py
diff --git a/djstripe/fields.py b/djstripe/fields.py
index <HASH>..<HASH> 100644
--- a/djstripe/fields.py
+++ b/djstripe/fields.py
@@ -68,7 +68,8 @@ class StripeFieldMixin(object):
         except (KeyError, TypeError):
             if self.stripe_required:
                 raise FieldError("Required stripe field '{field_name}' was not"
-                                 " provided in stripe object.".format(field_name=self.name))
+                                 " provided in {model_name} data object.".format(field_name=self.name,
+                                                                                 model_name=self.model._meta.object_name))
             else:
                 result = None
made field error handling a bit better
py
diff --git a/bddrest/documentary/documenter.py b/bddrest/documentary/documenter.py
index <HASH>..<HASH> 100644
--- a/bddrest/documentary/documenter.py
+++ b/bddrest/documentary/documenter.py
@@ -58,10 +58,13 @@ class Documenter:
                 info = self.fieldinfo(call.url, call.verb, k) \
                     if self.fieldinfo else None
 
-                if info is None:
-                    info = dict(not_none='?', required='?')
-
-                rows.append((k, info['required'], info['not_none'], v))
+                info = info or {}
+                rows.append((
+                    k,
+                    info.get('required', '?'),
+                    info.get('not_none', '?'),
+                    v
+                ))
 
             formatter.write_table(
                 rows,
Adding required and nullable fields for form table, closes #<I> and also closes #<I>
py
diff --git a/image_scraper/mains.py b/image_scraper/mains.py
index <HASH>..<HASH> 100644
--- a/image_scraper/mains.py
+++ b/image_scraper/mains.py
@@ -56,7 +56,7 @@ def console_main():
             break
 
     pbar.finish()
-    print "\nDone!\nDownloaded %s images" % (count-failed-over_max_filesize)
+    print "\nDone!\nDownloaded %s images\nFailed: %s\n" % (count-failed-over_max_filesize, failed)
     return
Printing number of failed downloads as well.
py
diff --git a/centinel/backend.py b/centinel/backend.py
index <HASH>..<HASH> 100644
--- a/centinel/backend.py
+++ b/centinel/backend.py
@@ -119,6 +119,7 @@ def sync(config):
             logging.error("Unable to send result file: %s" % str(e))
         if time.time() - start > config['server']['total_timeout']:
             logging.error("Interaction with server took too long. Preempting")
+            return
 
     # get all experiment names
     available_experiments = []
@@ -129,6 +130,7 @@ def sync(config):
     available_experiments = set(available_experiments)
     if time.time() - start > config['server']['total_timeout']:
         logging.error("Interaction with server took too long. Preempting")
+        return
 
     # download new experiments from server
     for experiment in (set(user.experiments) - available_experiments):
@@ -138,6 +140,7 @@ def sync(config):
             logging.error("Unable to download experiment files %s", str(e))
         if time.time() - start > config['server']['total_timeout']:
             logging.error("Interaction with server took too long. Preempting")
+            return
 
     logging.info("Finished sync with %s", config['server']['server_url'])
ensured that we break out when over time
py
diff --git a/tests/test_ellipsoid.py b/tests/test_ellipsoid.py
index <HASH>..<HASH> 100644
--- a/tests/test_ellipsoid.py
+++ b/tests/test_ellipsoid.py
@@ -22,6 +22,7 @@ def test_sample():
     for i in range(nsim):
         R.append(mu.sample()[0])
     R = np.array(R)
+    assert (all([mu.contains(_) for _ in R]))
 
     # here I'm checking that all the points are uniformly distributed
     # within each ellipsoid
@@ -60,7 +61,7 @@ def test_sample_q():
                 R.append(x)
                 break
     R = np.array(R)
-
+    assert (all([mu.contains(_) for _ in R]))
     # here I'm checking that all the points are uniformly distributed
     # within each ellipsoid
     for curc in [cen1, cen2]:
add additional ellipsoidal test
py
diff --git a/coaster/views/decorators.py b/coaster/views/decorators.py
index <HASH>..<HASH> 100644
--- a/coaster/views/decorators.py
+++ b/coaster/views/decorators.py
@@ -545,7 +545,7 @@ def cors(origins,
     def wrapper(*args, **kwargs):
         origin = request.headers.get('Origin')
         if request.method not in methods:
-            abort(401)
+            abort(405)
 
         if origins == '*':
             pass
@@ -554,7 +554,7 @@ def cors(origins,
         elif callable(origins) and origins(origin):
             pass
         else:
-            abort(401)
+            abort(403)
 
         if request.method == 'OPTIONS':
             # pre-flight request
Fix HTTP status codes for invalid method and unauthorized access (#<I>)
py
diff --git a/plexapi/base.py b/plexapi/base.py
index <HASH>..<HASH> 100644
--- a/plexapi/base.py
+++ b/plexapi/base.py
@@ -204,7 +204,7 @@ class PlexObject(object):
         data = self._server.query(ekey, params=url_kw)
         items = self.findItems(data, cls, ekey, **kwargs)
 
-        librarySectionID = data.attrib.get('librarySectionID')
+        librarySectionID = utils.cast(int, data.attrib.get('librarySectionID'))
         if librarySectionID:
             for item in items:
                 item.librarySectionID = librarySectionID
Cast librarySectionID to int
py
diff --git a/ladybug/_datacollectionbase.py b/ladybug/_datacollectionbase.py
index <HASH>..<HASH> 100644
--- a/ladybug/_datacollectionbase.py
+++ b/ladybug/_datacollectionbase.py
@@ -629,9 +629,10 @@ class BaseCollection(object):
             elif isinstance(dat_type, typ_clss._time_aggregated_type):
                 time_class = typ_clss
                 break
-        else:
-            raise ValueError('Data type "{}" does not have a time-rate-of-'
-                             'change metric.'.format(head.data_type))
+        # if nothing was found, throw an error
+        if time_class is None:
+            raise ValueError('Data type "{}" does not have a time-rate-of-'
+                             'change metric.'.format(head.data_type))
 
         # create the new data collection and assign normalized values
         new_data_c = self.to_unit(head.data_type.units[0])
style(datacollection): Improve error message in the event of no type
py
diff --git a/sh.py b/sh.py
index <HASH>..<HASH> 100644
--- a/sh.py
+++ b/sh.py
@@ -1674,6 +1674,8 @@ class StreamWriter(object):
         # support
         try:
             chunk = self.get_chunk()
+            if chunk is None:
+                raise DoneReadingForever
         except DoneReadingForever:
             self.log.debug("done reading")
edge case where our stdin callback doesn't return anything
py
diff --git a/openstack_dashboard/dashboards/project/volumes/volumes/tests.py b/openstack_dashboard/dashboards/project/volumes/volumes/tests.py
index <HASH>..<HASH> 100644
--- a/openstack_dashboard/dashboards/project/volumes/volumes/tests.py
+++ b/openstack_dashboard/dashboards/project/volumes/volumes/tests.py
@@ -955,7 +955,7 @@ class VolumeViewTests(test.TestCase):
         self.assertContains(res, expected_string, html=True,
                             msg_prefix="The create button is not disabled")
 
-    @test.create_stubs({cinder: ('volume_get',),
+    @test.create_stubs({cinder: ('volume_get', 'tenant_absolute_limits'),
                         api.nova: ('server_get',)})
     def test_detail_view(self):
         volume = self.cinder_volumes.first()
@@ -965,6 +965,8 @@ class VolumeViewTests(test.TestCase):
         cinder.volume_get(IsA(http.HttpRequest), volume.id).AndReturn(volume)
         api.nova.server_get(IsA(http.HttpRequest),
                             server.id).AndReturn(server)
+        cinder.tenant_absolute_limits(IsA(http.HttpRequest))\
+            .AndReturn(self.cinder_limits['absolute'])
 
         self.mox.ReplayAll()
Stub out cinder.tenant_absolute_limits in cinder test

To render dropdown actions on the detail page, the allowed() method in the volume table is called, so a method called in allowed() needs to be stubbed out.

Change-Id: Ib<I>eee0fa2c8b<I>e1e<I>fcd1bdb0f<I>bc1
Closes-Bug: #<I>
py
diff --git a/pyt/vulnerability_log.py b/pyt/vulnerability_log.py
index <HASH>..<HASH> 100644
--- a/pyt/vulnerability_log.py
+++ b/pyt/vulnerability_log.py
@@ -25,7 +25,7 @@ class VulnerabilityLog(object):
         print('%s vulnerabilities found:' % number_of_vulnerabilities)
 
         for i, vulnerability in enumerate(self.vulnerabilities, start=1):
-            print('Vulnerability {}:\n{}'.format(i, vulnerability))
+            print('Vulnerability {}:\n{}\n'.format(i, vulnerability))
 
 
 class Vulnerability(object):
more newlines in vuln presentation
py
diff --git a/ansible_runner/runner_config.py b/ansible_runner/runner_config.py
index <HASH>..<HASH> 100644
--- a/ansible_runner/runner_config.py
+++ b/ansible_runner/runner_config.py
@@ -732,25 +732,8 @@ class RunnerConfig(object):
 
             new_args.extend(["--ipc=host"])
 
-            # These directories need to exist before they are mounted in the container,
-            # or they will be owned by root.
-            private_subdirs = [
-                d for d in os.listdir(self.private_data_dir) if os.path.isdir(
-                    os.path.join(self.private_data_dir, d)
-                )
-            ]
-
-            if 'artifacts' not in private_subdirs:
-                private_subdirs += ['artifacts']
-
-            for d in private_subdirs:
-                if not os.path.exists(os.path.join(self.private_data_dir, d)):
-                    if d == 'artifacts':
-                        os.mkdir(os.path.join(self.private_data_dir, d), 0o700)
-                    else:
-                        continue
-
-                new_args.extend(["-v", "{}:/runner/{}:Z".format(os.path.join(self.private_data_dir, d), d)])
+            # Mount the private_data_dir
+            new_args.extend(["-v", "{}:/runner:Z".format(self.private_data_dir)])
 
             container_volume_mounts = self.container_volume_mounts
             if container_volume_mounts:
Simplify by mounting the private_data_dir
py
diff --git a/fabfile/eg.py b/fabfile/eg.py
index <HASH>..<HASH> 100644
--- a/fabfile/eg.py
+++ b/fabfile/eg.py
@@ -24,3 +24,9 @@ def basic_tagger():
 def cnn_tagger():
     with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
         local('python examples/cnn_tagger.py')
+
+
+@task
+def spacy_tagger():
+    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
+        local('python examples/spacy_tagger.py')
Add spacy_tagger to fabfile
py
diff --git a/tests/device_tests/test_op_return.py b/tests/device_tests/test_op_return.py
index <HASH>..<HASH> 100644
--- a/tests/device_tests/test_op_return.py
+++ b/tests/device_tests/test_op_return.py
@@ -65,6 +65,7 @@ class TestOpReturn(common.TrezorTest):
             proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
             proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
             proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=1)),
+            proto.ButtonRequest(code=proto_types.ButtonRequest_ConfirmOutput),
             proto.ButtonRequest(code=proto_types.ButtonRequest_SignTx),
             proto.TxRequest(request_type=proto_types.TXINPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
             proto.TxRequest(request_type=proto_types.TXOUTPUT, details=proto_types.TxRequestDetailsType(request_index=0)),
device_tests: op_return now requires confirmation by user
py
diff --git a/Scout2.py b/Scout2.py
index <HASH>..<HASH> 100755
--- a/Scout2.py
+++ b/Scout2.py
@@ -176,6 +176,9 @@ def main(args):
                     aws_config['services'][service]['violations'][rule]['service'] = service
             except Exception as e:
                 printError('Failed to process rule defined in %s.json' % rule)
+                # Fallback if process rule failed to ensure report creation and data dump still happen
+                aws_config['services'][service]['violations'][rule]['checked_items'] = 0
+                aws_config['services'][service]['violations'][rule]['flagged_items'] = 0
                 printException(e)
 
     # Tweaks
If rule processing fails, create flagged and checked items keys and set values to 0
py
diff --git a/insights/specs/insights_archive.py b/insights/specs/insights_archive.py
index <HASH>..<HASH> 100644
--- a/insights/specs/insights_archive.py
+++ b/insights/specs/insights_archive.py
@@ -74,10 +74,10 @@ class InsightsArchiveSpecs(Specs):
     installed_rpms = head(all_installed_rpms)
     hostname = first_of([simple_file("insights_commands/hostname_-f"), simple_file("insights_commands/hostname")])
     hponcfg_g = simple_file("insights_commands/hponcfg_-g")
-    httpd_M = glob_file("insights_commands/httpd*_-M")
+    httpd_M = glob_file("insights_commands/*httpd*_-M")
     httpd_on_nfs = simple_file("insights_commands/python_-m_insights.tools.cat_--no-header_httpd_on_nfs")
     httpd_pid = simple_file("insights_commands/pgrep_-o_httpd")
-    httpd_V = glob_file("insights_commands/httpd*_-V")
+    httpd_V = glob_file("insights_commands/*httpd*_-V")
     ifconfig = simple_file("insights_commands/ifconfig_-a")
     ip6tables = simple_file("insights_commands/ip6tables-save")
     ip_addr = simple_file("insights_commands/ip_addr")
Update httpd_V and httpd_M to make them compatible with other httpd packages (#<I>)

* should be compatible with httpd<I>-httpd and jbcs-httpd<I>-httpd
py
diff --git a/python/dllib/src/bigdl/dllib/keras/utils.py b/python/dllib/src/bigdl/dllib/keras/utils.py
index <HASH>..<HASH> 100644
--- a/python/dllib/src/bigdl/dllib/keras/utils.py
+++ b/python/dllib/src/bigdl/dllib/keras/utils.py
@@ -74,7 +74,7 @@ def to_bigdl_metric(metric):
     elif metric == "mae":
         return MAE()
     elif metric == "auc":
-        return AUC(1000)
+        return AUC()
     elif metric == "loss":
         return Loss()
     elif metric == "treennaccuracy":
Update default value of AUC to <I> (#<I>)

* update
* typo
py
diff --git a/modules/livestatus/livestatus_regenerator.py b/modules/livestatus/livestatus_regenerator.py
index <HASH>..<HASH> 100644
--- a/modules/livestatus/livestatus_regenerator.py
+++ b/modules/livestatus/livestatus_regenerator.py
@@ -24,7 +24,7 @@
 # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
 
 import types
-import time
+#import time
 from shinken.objects import Contact
 from shinken.objects import NotificationWay
 from shinken.misc.regenerator import Regenerator
@@ -36,8 +36,9 @@ from livestatus_query_metainfo import HINT_NONE, HINT_HOST, HINT_HOSTS, HINT_SER
 def itersorted(self, hints=None):
     preselected_ids = []
     preselection = False
-    logger.debug("[Livestatus Regenerator] Hint is %s" % hints["target"])
-    if hints == None:
+    if hints is not None:
+        logger.debug("[Livestatus Regenerator] Hint is %s" % hints["target"])
+    if hints is None:
         # return all items
         hints = {}
     elif hints['target'] == HINT_HOST:
Fix: Livestatus regenerator crash on debug log if hints is None
py
diff --git a/intranet/settings/base.py b/intranet/settings/base.py
index <HASH>..<HASH> 100644
--- a/intranet/settings/base.py
+++ b/intranet/settings/base.py
@@ -104,8 +104,8 @@ STATICFILES_FINDERS = (
 )
 
 AUTHENTICATION_BACKENDS = (
-    "intranet.apps.auth.backends.KerberosAuthenticationBackend",
     "intranet.apps.auth.backends.MasterPasswordAuthenticationBackend",
+    "intranet.apps.auth.backends.KerberosAuthenticationBackend",
 )
 
 AUTH_USER_MODEL = "users.User"
master pwd before kerberos
py
diff --git a/examples/synthesis_json.py b/examples/synthesis_json.py
index <HASH>..<HASH> 100755
--- a/examples/synthesis_json.py
+++ b/examples/synthesis_json.py
@@ -137,7 +137,7 @@ if __name__ == '__main__':
                  ["n_neurites", "number", "axon", 1, None,
                   {"neurite_type": ezy.TreeType.axon}]]
 
-    comps = ["soma"]
+    comps = ["soma", "basal_dendrite", "apical_dendrite", "axon"]
 
     for d in data_dirs:
         for f in get_morph_files(d):
Correct components input in synthesis_json
py
diff --git a/secedgar/filings/cik_validator.py b/secedgar/filings/cik_validator.py
index <HASH>..<HASH> 100644
--- a/secedgar/filings/cik_validator.py
+++ b/secedgar/filings/cik_validator.py
@@ -17,6 +17,8 @@ class _CIKValidator(object):
 
     .. versionadded:: 0.1.5
     """
+    # See Stack Overflow's answer to how-do-you-pep-8-name-a-class-whose-name-is-an-acronym
+    # if you are wondering whether CIK should be capitalized in the class name or not.
 
     def __init__(self, lookups, client=None, **kwargs):
         # Make sure lookups is not empty string
         if lookups and isinstance(lookups, str):
MAINT: Note on abbrev in class names
py
diff --git a/openquake/calculators/base.py b/openquake/calculators/base.py
index <HASH>..<HASH> 100644
--- a/openquake/calculators/base.py
+++ b/openquake/calculators/base.py
@@ -76,8 +76,12 @@ PRECALC_MAP = dict(
     classical_risk=['classical'],
     classical_bcr=['classical'],
     classical_damage=['classical'],
-    event_based=['event_based_risk'],
-    event_based_risk=['event_based', 'event_based_rupture'],
+    ebrisk=['event_based', 'event_based_rupture', 'ebrisk',
+            'event_based_risk'],
+    event_based=['event_based', 'event_based_rupture', 'ebrisk',
+                 'event_based_risk'],
+    event_based_risk=['event_based', 'ebrisk', 'event_based_risk',
+                      'event_based_rupture'],
     ucerf_classical=['ucerf_psha'])
Relaxed the PRECALC_MAP for event_based calculators
py
diff --git a/neo/api/JSONRPC/JsonRpcApi.py b/neo/api/JSONRPC/JsonRpcApi.py
index <HASH>..<HASH> 100644
--- a/neo/api/JSONRPC/JsonRpcApi.py
+++ b/neo/api/JSONRPC/JsonRpcApi.py
@@ -286,7 +286,7 @@ class JsonRpcApi(object):
             appengine = ApplicationEngine.Run(script=script)
 
             return {
-                "script": script.hex(),
+                "script": script.decode('utf-8'),
                 "state": appengine.State,
                 "gas_consumed": appengine.GasConsumed().ToString(),
                 "stack": [ContractParameter.ToParameter(item).ToJson() for item in appengine.EvaluationStack.Items]
Fix `script` value in JSON-RPC invokes

* Fixed the `script` value being returned in JSON-RPC invokes. It was being double-encoded in HEX previously. Now, the proper script value is returned, consistent with the NEO reference implementation.
py
diff --git a/gwpy/table/tests/test_table.py b/gwpy/table/tests/test_table.py
index <HASH>..<HASH> 100644
--- a/gwpy/table/tests/test_table.py
+++ b/gwpy/table/tests/test_table.py
@@ -107,7 +107,7 @@ class TestTable(object):
                 dtp = None
             # use map() to support non-primitive types
             if dtype(dtp).name == 'object':
-                data.append(map(dtp, random.rand(n) * 1000))
+                data.append(list(map(dtp, random.rand(n) * 1000)))
             else:
                 data.append((random.rand(n) * 1000).astype(dtp))
         return cls.TABLE(data, names=names)
gwpy.table: use list in test fixture
py
diff --git a/src/dolo/compiler/compiler_python.py b/src/dolo/compiler/compiler_python.py
index <HASH>..<HASH> 100644
--- a/src/dolo/compiler/compiler_python.py
+++ b/src/dolo/compiler/compiler_python.py
@@ -51,7 +51,7 @@ class PythonCompiler(Compiler):
         # Model equations
         #
         import numpy as np
-        from numpy import exp,log
+        from numpy import exp,log, sin, cos, tan, asin, acos, atan, sinh, cosh, tanh
 
         it_ = 1 # should remove this !
Added usual functions to python compiled files.
py
diff --git a/climata/snotel/__init__.py b/climata/snotel/__init__.py
index <HASH>..<HASH> 100644
--- a/climata/snotel/__init__.py
+++ b/climata/snotel/__init__.py
@@ -16,7 +16,7 @@ _server = None
 
 
 def get_server():
     global _server
-    if not _server:
+    if _server is None:
         _server = Client(url).service
     return _server
don't attempt to evaluate _server as boolean (fixes #<I>)
py
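The `is None` fix above is the classic lazy-initialization pitfall: a truthiness test re-runs the expensive setup whenever the cached object happens to be falsy. A hedged, self-contained sketch (the factory stands in for the SOAP client in the commit):

# Truthiness vs. identity for a lazily-created singleton.
_cached = None

def get_cached(factory=list):
    # `factory` is illustrative; `if not _cached:` would rebuild the
    # value on every call here, because an empty list is falsy.
    global _cached
    if _cached is None:          # correct: build exactly once
        _cached = factory()
    return _cached

first = get_cached()
assert get_cached() is first     # still cached even though [] is falsy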
diff --git a/flawless/server/service.py b/flawless/server/service.py
index <HASH>..<HASH> 100644
--- a/flawless/server/service.py
+++ b/flawless/server/service.py
@@ -370,7 +370,7 @@ class FlawlessService(object):
             mod_time = mod_time.strftime("%Y-%m-%d %H:%M:%S")
         known_entry = self._get_entry(blamed_entry, self.known_errors)
         err_info = api.ErrorInfo(error_count=1,
-                                 developer_email=email,
+                                 developer_email=self._get_email(email),
                                  date=mod_time,
                                  email_sent=False,
                                  last_occurrence=cur_time,
Get correct email address when creating err info
py
diff --git a/optlang/inspyred_interface.py b/optlang/inspyred_interface.py
index <HASH>..<HASH> 100644
--- a/optlang/inspyred_interface.py
+++ b/optlang/inspyred_interface.py
@@ -28,6 +28,7 @@ import types
 log = logging.getLogger(__name__)
 
 import sympy
+import inspyred
 
 import interface
inspyred import got lost during cleanup
py
diff --git a/telethon/client/telegrambaseclient.py b/telethon/client/telegrambaseclient.py
index <HASH>..<HASH> 100644
--- a/telethon/client/telegrambaseclient.py
+++ b/telethon/client/telegrambaseclient.py
@@ -291,11 +291,14 @@ class TelegramBaseClient(abc.ABC):
         """
         Disconnects from Telegram.
         """
-        await self._sender.disconnect()
+        # All properties may be ``None`` if `__init__` fails, and this
+        # method will be called from `__del__` which would crash then.
+        if self._sender:
+            await self._sender.disconnect()
         if self._updates_handle:
             await self._updates_handle
-
-        self.session.close()
+        if self.session:
+            self.session.close()
 
     def __del__(self):
         # Python 3.5.2's ``asyncio`` mod seems to have a bug where it's not
Assert properties exist on disconnect
py
diff --git a/tests/db/models_test.py b/tests/db/models_test.py
index <HASH>..<HASH> 100644
--- a/tests/db/models_test.py
+++ b/tests/db/models_test.py
@@ -391,8 +391,8 @@ class SESRuptureTestCase(unittest.TestCase):
             id=self.source_rupture.id)
         self.assertEqual((1, 2, 0.1), source_rupture.top_left_corner)
         self.assertEqual((3, 4, 0.2), source_rupture.top_right_corner)
-        self.assertEqual((5, 6, 0.3), source_rupture.bottom_right_corner)
-        self.assertEqual((7, 8, 0.4), source_rupture.bottom_left_corner)
+        self.assertEqual((5, 6, 0.3), source_rupture.bottom_left_corner)
+        self.assertEqual((7, 8, 0.4), source_rupture.bottom_right_corner)
 
     def test__validate_planar_surface(self):
         source_rupture = models.SESRupture.objects.get(
db/models_test: Update to test according to the change in the order of planar surface corner points.

Former-commit-id: <I>ecf0a4fae<I>fec7c<I>af<I>ca<I>
py
diff --git a/modelforge/registry.py b/modelforge/registry.py
index <HASH>..<HASH> 100644
--- a/modelforge/registry.py
+++ b/modelforge/registry.py
@@ -35,15 +35,15 @@ def publish_model(args: argparse.Namespace, backend: StorageBackend, log: loggin
     path = os.path.abspath(args.model)
     try:
         model = GenericModel(source=path, dummy=True)
-    except ValueError:
-        log.critical('"model" must be a path')
+    except ValueError as e:
+        log.critical('"model" must be a path: %s', e)
         return 1
     except Exception as e:
         log.critical("Failed to load the model: %s: %s" % (type(e).__name__, e))
         return 1
     meta = model.meta
-    model_url = backend.upload_model(path, meta, args.force)
     with backend.lock():
+        model_url = backend.upload_model(path, meta, args.force)
         log.info("Uploaded as %s", model_url)
         log.info("Updating the models index...")
         index = backend.fetch_index()
Fix TransactionRequiredError while uploading a model
py
diff --git a/travis_docs_builder.py b/travis_docs_builder.py
index <HASH>..<HASH> 100755
--- a/travis_docs_builder.py
+++ b/travis_docs_builder.py
@@ -168,9 +168,12 @@ def setup_GitHub_push(repo):
     print("Fetching token remote")
     run(['git', 'fetch', 'origin_token'])
     #create gh-pages empty branch with .nojekyll if it doesn't already exist
-    create_gh_pages()
+    new_gh_pages = create_gh_pages()
     print("Checking out gh-pages")
-    run(['git', 'checkout', '-b', 'gh-pages', '--track', 'origin_token/gh-pages'])
+    if new_gh_pages:
+        run(['git', 'checkout', 'gh-pages'])
+    else:
+        run(['git', 'checkout', '-b', 'gh-pages', '--track', 'origin_token/gh-pages'])
     print("Done")
     return True
Be careful which checkout command is used

If the branch was just created, then don't use `-b`.
py
diff --git a/messages/email.py b/messages/email.py
index <HASH>..<HASH> 100644
--- a/messages/email.py
+++ b/messages/email.py
@@ -20,10 +20,10 @@ class Email:
         server_name: str, i.e. 'smtp.gmail.com'
         server_port: int, i.e. 465
         password: str
-        From: str, i.e. '[email protected]'
-        To: str or list, i.e. '[email protected]' or ['[email protected]', '[email protected]']
-        Cc: str or list
-        Bcc: str or list
+        from_: str, i.e. '[email protected]'
+        to: str or list, i.e. '[email protected]' or ['[email protected]', '[email protected]']
+        cc: str or list
+        bcc: str or list
         subject: str
         body_text: str
         attachments: list, i.e. ['/home/you/file1.txt', '/home/you/file2.pdf']
updates docstrings to more accurately reflect argument names
py
diff --git a/src/transformers/generation_utils.py b/src/transformers/generation_utils.py
index <HASH>..<HASH> 100644
--- a/src/transformers/generation_utils.py
+++ b/src/transformers/generation_utils.py
@@ -959,12 +959,6 @@ class GenerationMixin:
         )
         stopping_criteria = self._get_stopping_criteria(max_length=max_length, max_time=max_time)
 
-        if max_length is not None:
-            warnings.warn(
-                "`max_length` is deprecated in this function, use `stopping_criteria=StoppingCriteriaList(MaxLengthCriteria(max_length=max_length))` instead.",
-                UserWarning,
-            )
-            stopping_criteria = validate_stopping_criteria(stopping_criteria, max_length)
 
         if is_greedy_gen_mode:
             if num_return_sequences > 1:
Fixes a useless warning. (#<I>) Fixes #<I>
py
diff --git a/easymoney/models.py b/easymoney/models.py
index <HASH>..<HASH> 100644
--- a/easymoney/models.py
+++ b/easymoney/models.py
@@ -16,6 +16,10 @@ class Money(Decimal):
     def __deepcopy__(self, memo):
         return self
 
+    def __float__(self):
+        """Float representation."""
+        return float(Decimal(self))
+
     def __str__(self):
         return '$%s' % Decimal(self)  # TODO: use babel
Fix conversion to float and hashing of Money
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -31,7 +31,7 @@ setup(
     zip_safe=False,
     entry_points={
         'flake8.extension': [
-            'flake8-string-format = flake8_string_format:StringFormatChecker',
+            'P = flake8_string_format:StringFormatChecker',
         ],
     },
     test_suite='test_flake8_string_format',
Fix entry point name

The entry point previously used the plugin name as its name, which actually breaks in Flake8 3.x as it won't automatically select this plugin. By using the error code space as the name it'll be automatically selected.
py
diff --git a/bang/stack.py b/bang/stack.py
index <HASH>..<HASH> 100644
--- a/bang/stack.py
+++ b/bang/stack.py
@@ -246,6 +246,9 @@ class Stack(object):
                 callbacks=playbook_cb,
                 runner_callbacks=runner_cb,
                 stats=stats,
+
+                # this allows connection reuse using "ControlPersist":
+                transport='ssh',
                 )
         pb.inventory = BangsibleInventory(
                 copy.deepcopy(self.groups_and_vars.lists),
Use the "ssh" transport for ansible playbooks. This sets ControlPersist, which allows connection reuse.
py
diff --git a/src/rez/utils/colorize.py b/src/rez/utils/colorize.py
index <HASH>..<HASH> 100644
--- a/src/rez/utils/colorize.py
+++ b/src/rez/utils/colorize.py
@@ -193,12 +193,7 @@ def _color(str_, fore_color=None, back_color=None, styles=None):
 
     .. _Colorama: https://pypi.python.org/pypi/colorama
     """
-    # TODO: Colorama is documented to work on Windows and trivial test case
-    # proves this to be the case, but it doesn't work in Rez. If the initialise
-    # is called in sec/rez/__init__.py then it does work, however as discussed
-    # in the following comment this is not always desirable. So until we can
-    # work out why we forcibly turn it off.
-    if not config.get("color_enabled", False) or platform_.name == "windows":
+    if not config.get("color_enabled", False):
         return str_
 
     # lazily init colorama. This is important - we don't want to init at startup,
Remove windows exception from colorize.py
py
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,12 +16,11 @@
 
 from setuptools import setup
 
-import oa2
 
 setup_params = dict(
     name = 'oa2',
-    description = oa2.__doc__.splitlines()[0],
-    version = oa2.OA2_VERSION,
+    description = 'OAuth2.0 client library',
+    version = '0.0',
     author = 'Ali Afshar',
     author_email = '[email protected]',
     url = 'http://github.com/aliafshar/oa2',
Stop prematurely importing oa2.
py
diff --git a/pysat/tests/test_registry.py b/pysat/tests/test_registry.py
index <HASH>..<HASH> 100644
--- a/pysat/tests/test_registry.py
+++ b/pysat/tests/test_registry.py
@@ -216,3 +216,19 @@ class TestRegistration():
         ensure_updated_stored_modules(self.modules)
 
         return
+
+    def test_platform_removal(self):
+        """Test registering multiple instruments at once"""
+
+        # register all modules at once
+        registry.register(self.module_names)
+        # verify instantiation
+        verify_platform_name_instantiation(self.modules)
+        # verify registration
+        ensure_live_registry_updated(self.modules)
+        # verify stored update
+        ensure_updated_stored_modules(self.modules)
+        # remove them using only platform
+        registry.remove(self.platform_names)
+
+        return
TST: Added test for removal using platform only
py
diff --git a/mailchimp3/mailchimpclient.py b/mailchimp3/mailchimpclient.py
index <HASH>..<HASH> 100644
--- a/mailchimp3/mailchimpclient.py
+++ b/mailchimp3/mailchimpclient.py
@@ -69,6 +69,8 @@ class MailChimpClient(object):
             raise e
         else:
             r.raise_for_status()
+        if r.status_code == 204:
+            return None
         return r.json()
Return None when status code is <I> for no content
py
diff --git a/lib/autokey/scripting/keyboard.py b/lib/autokey/scripting/keyboard.py
index <HASH>..<HASH> 100644
--- a/lib/autokey/scripting/keyboard.py
+++ b/lib/autokey/scripting/keyboard.py
@@ -16,6 +16,7 @@ import typing
 
 import autokey.model.phrase
+import autokey.iomediator.waiter
 from autokey import iomediator, model
keyboard.wait_for_keypress() is broken without this import.
py
diff --git a/pout/__main__.py b/pout/__main__.py
index <HASH>..<HASH> 100644
--- a/pout/__main__.py
+++ b/pout/__main__.py
@@ -170,7 +170,6 @@ class SiteCustomizeFile(str):
             "    pout.inject()",
             "",
         ]))
-        logger.debug("Injected pout into {}".format(self))
         return True
@@ -200,8 +199,10 @@ def main_inject(args):
             logger.info("Pout has already been injected into {}".format(filepath))
 
         else:
-            filepath.inject()
-            logger.debug("Injected pout into {}".format(filepath))
+            if filepath.inject():
+                logger.info("Injected pout into {}".format(filepath))
+            else:
+                logger.info("Failed to inject pout into {}".format(filepath))
 
     except IOError as e:
         ret = 1
had a debug when I wanted an info
py
diff --git a/scoop/reduction.py b/scoop/reduction.py
index <HASH>..<HASH> 100644
--- a/scoop/reduction.py
+++ b/scoop/reduction.py
@@ -22,17 +22,26 @@ total = {}
 # Set the number of ran futures for a given group
 sequence = defaultdict(itertools.count)
 
+
 def reduction(inFuture, operation):
     """Generic reduction method. Subclass it (using partial() is
     recommended) to specify an operation or enhance its features if
     needed."""
     global total
+    uniqueReferences = []
     try:
-        uniqueReference = [cb.groupID for cb in inFuture.callback]
+        for cb in inFuture.callback:
+            if cb.groupID:
+                uniqueReferences.append(cb.groupID)
     except IndexError:
         raise Exception("Could not find reduction reference.")
-    total[uniqueReference[0]] = operation(inFuture.result(),
-                                          total.get(uniqueReference[0], 0))
-    inFuture.resultValue = total[uniqueReference[0]]
+    for uniqueReference in uniqueReferences:
+        if uniqueReference not in total:
+            total[uniqueReference] = inFuture.result()
+        else:
+            total[uniqueReference] = operation(total[uniqueReference],
+                                               inFuture.result())
+    inFuture.resultValue = total[uniqueReferences[0]]
+
 
 def cleanGroupID(inGroupID):
     global total
Reduction operation now correctly begins using the first two map results
py
diff --git a/visidata/freeze.py b/visidata/freeze.py
index <HASH>..<HASH> 100644
--- a/visidata/freeze.py
+++ b/visidata/freeze.py
@@ -17,9 +17,9 @@ def StaticColumn(sheet, col):
     # no need to undo, addColumn undo is enough
     for r in Progress(rows, 'calculating'):
         try:
-            frozencol.setValue(r, col.getTypedValue(r))
+            frozencol.putValue(r, col.getTypedValue(r))
         except Exception as e:
-            frozencol.setValue(r, e)
+            frozencol.putValue(r, e)
 
     calcRows_async(frozencol, sheet.rows, col)
     return frozencol
[frozen] use putValue instead of setValue with frozen columns

- means frozen columns are not deferred

Closes #<I>
py
diff --git a/paypal/standard/models.py b/paypal/standard/models.py
index <HASH>..<HASH> 100644
--- a/paypal/standard/models.py
+++ b/paypal/standard/models.py
@@ -301,27 +301,7 @@ class PayPalStandardBase(Model):
 
     def send_signals(self):
         """Shout for the world to hear whether a txn was successful."""
-
-        # Don't do anything if we're not notifying!
-        if self.from_view != 'notify':
-            return
-
-        # Transaction signals:
-        if self.is_transaction():
-            if self.flag:
-                payment_was_flagged.send(sender=self)
-            else:
-                payment_was_successful.send(sender=self)
-        # Subscription signals:
-        else:
-            if self.is_subscription_cancellation():
-                subscription_cancel.send(sender=self)
-            elif self.is_subscription_signup():
-                subscription_signup.send(sender=self)
-            elif self.is_subscription_end_of_term():
-                subscription_eot.send(sender=self)
-            elif self.is_subscription_modified():
-                subscription_modify.send(sender=self)
+        raise NotImplementedError
 
     def initialize(self, request):
         """Store the data we'll need to make the postback from the request object."""
Removed unreachable (and broken) code
py
diff --git a/labsuite/compilers/pfusx.py b/labsuite/compilers/pfusx.py
index <HASH>..<HASH> 100644
--- a/labsuite/compilers/pfusx.py
+++ b/labsuite/compilers/pfusx.py
@@ -201,8 +201,15 @@ def get_plasmid_wells(sequence, backbone='DNA'):
 
     return well_locs
 
-def _make_transfer(from_plate, from_well, to_plate, to_well, volume=0, touch=True):
-
+def _make_transfer(fplate, fwell, tplate, twell, volume=0, touch=True):
+    """
+    Creates a new transfer object for injection into an instruction group
+    within the OpenTrons JSON Protocol format.
+
+    Code could be simplified; but it's an experiment on how to do it on a
+    more general scale.
+    """
+
     transfer = {
         "transfer": [{
             "from": {
@@ -223,10 +230,10 @@ def _make_transfer(from_plate, from_well, to_plate, to_well, volume=0, touch=Tru
     fm = args['from']
     to = args['to']
 
-    fm['container'] = from_plate
-    fm['location'] = from_well
-    to['container'] = to_plate
-    to['location'] = to_well
+    fm['container'] = fplate
+    fm['location'] = fwell
+    to['container'] = tplate
+    to['location'] = twell
     to['touch-tip'] = touch
 
     args['volume'] = volume
Refactor: Argument tweak for line length.
py
diff --git a/src/hszinc/parser.py b/src/hszinc/parser.py
index <HASH>..<HASH> 100644
--- a/src/hszinc/parser.py
+++ b/src/hszinc/parser.py
@@ -240,6 +240,19 @@ def parse_coord(coordinate_node):
     lng = float(coordinate_node.children[4].text)
     return Coordinate(lat, lng)
 
+def parse_ref(ref_node):
+    assert ref_node.expr_name == 'ref'
+    assert len(ref_node.children) == 3
+    # We have an @ symbol, the reference name and in a child node,
+    # the value string.
+    ref = ref_node.children[1].text
+    has_value = bool(ref_node.children[2].children)
+    if has_value:
+        value = parse_str(ref_node.children[2].children[0].children[1])
+    else:
+        value = None
+    return Ref(ref, value, has_value)
+
 def parse_date(date_node):
     assert date_node.expr_name == 'date'
     # Date is in 3 parts, separated by hyphens.
parser: Add parsing of Refs.
py
diff --git a/tests/teststoreoptions.py b/tests/teststoreoptions.py index <HASH>..<HASH> 100644 --- a/tests/teststoreoptions.py +++ b/tests/teststoreoptions.py @@ -6,6 +6,7 @@ import numpy as np from holoviews import Overlay, Curve, Image from holoviews.core.options import Store, StoreOptions from holoviews.element.comparison import ComparisonTestCase +from holoviews import plotting # pyflakes:ignore Register backends class TestStoreOptionsMerge(ComparisonTestCase):
Fixed missing import needed to run tests/teststoreoptions.py as a standalone test
py
diff --git a/test_mnemonic.py b/test_mnemonic.py index <HASH>..<HASH> 100755 --- a/test_mnemonic.py +++ b/test_mnemonic.py @@ -203,6 +203,7 @@ class MnemonicTest(unittest.TestCase): self.assertEqual(seed_nfkd, seed_nfc) self.assertEqual(seed_nfkd, seed_nfkc) + self.assertEqual(seed_nfkd, seed_nfd) def __main__(): unittest.main()
Add one missing check to the UTF-8 unit test
py
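Aside: the new assertion leans on the fact that all four Unicode normalization forms of a passphrase must yield the same seed. A minimal stdlib sketch of why normalizing first makes that hold (the passphrase value is illustrative):

import unicodedata

passphrase = "caf\u00e9"                         # precomposed e-acute
decomposed = unicodedata.normalize("NFD", passphrase)
assert passphrase != decomposed                  # different code points...
# ...but applying NFKD (as BIP-39 mandates) to either spelling converges,
# so seeds derived from NFC/NFD/NFKC/NFKD input must all be equal:
assert (unicodedata.normalize("NFKD", passphrase)
        == unicodedata.normalize("NFKD", decomposed))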
diff --git a/src/rituals/util/notify.py b/src/rituals/util/notify.py index <HASH>..<HASH> 100644 --- a/src/rituals/util/notify.py +++ b/src/rituals/util/notify.py @@ -66,4 +66,5 @@ def error(msg): def failure(msg): """Emit a fatal message and exit.""" error(msg) - raise exceptions.Exit(1) + sys.exit(1) + #raise exceptions.Exit(1)
:arrow_upper_right: use sys.exit, since Exit dumps a traceback
py
diff --git a/djangosaml2idp/views.py b/djangosaml2idp/views.py index <HASH>..<HASH> 100644 --- a/djangosaml2idp/views.py +++ b/djangosaml2idp/views.py @@ -236,7 +236,7 @@ class SSOInitView(LoginRequiredMixin, IdPHandlerViewMixin, View): sign_assertion = self.IDP.config.getattr("sign_assertion", "idp") or False authn_resp = self.IDP.create_authn_response( identity=identity, - in_response_to="IdP_Initiated_Login", + in_response_to=None, destination=destination, sp_entity_id=sp_entity_id, userid=user_id,
SAML fails when "InResponseTo" attribute is sent in the SAML response to Service Provider (such as Salesforce) (#<I>)
py
diff --git a/python_modules/dagster/dagster/core/instance/__init__.py b/python_modules/dagster/dagster/core/instance/__init__.py index <HASH>..<HASH> 100644 --- a/python_modules/dagster/dagster/core/instance/__init__.py +++ b/python_modules/dagster/dagster/core/instance/__init__.py @@ -76,7 +76,7 @@ def _dagster_home_dir(): def _dagster_compute_log_manager(base_dir): config = _dagster_config(base_dir) compute_log_base = os.path.join(base_dir, 'storage') - if config and config['compute_logs']: + if config and config.get('compute_logs'): if 'module' in config['compute_logs'] and 'class' in config['compute_logs']: from dagster.core.storage.compute_log_manager import ComputeLogManager
Fix loading config.yaml Summary: If we had a `config.yaml` without `compute_logs` defined, then loading an instance would break. Simply changing to `.get()` to fix this. Test Plan: unit Reviewers: max Reviewed By: max Differential Revision: <URL>
py
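The one-character fix is the standard guard for optional config keys; a minimal sketch of the difference:

config = {"other_key": 1}              # a config.yaml without compute_logs

# config["compute_logs"] would raise KeyError here; .get() returns None,
# so `config and config.get("compute_logs")` is merely falsy, not fatal.
if config and config.get("compute_logs"):
    pass                               # only reached when the key is present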
diff --git a/aioimaplib/aioimaplib.py b/aioimaplib/aioimaplib.py index <HASH>..<HASH> 100644 --- a/aioimaplib/aioimaplib.py +++ b/aioimaplib/aioimaplib.py @@ -596,8 +596,7 @@ class IMAP4ClientProtocol(asyncio.Protocol): command.close(response_text, result=response_result) def _continuation(self, line): - if 'literal data' in line: - # APPEND case + if self.pending_sync_command is not None and self.pending_sync_command.name == 'APPEND': if self.literal_data is None: Abort('asked for literal data but have no literal data to send') self.transport.write(self.literal_data)
[fix] issue #<I>
py
diff --git a/mongo_connector/doc_managers/formatters.py b/mongo_connector/doc_managers/formatters.py index <HASH>..<HASH> 100644 --- a/mongo_connector/doc_managers/formatters.py +++ b/mongo_connector/doc_managers/formatters.py @@ -12,6 +12,11 @@ if PY3: long = int RE_TYPE = type(re.compile("")) +try: + from bson.regex import Regex + RE_TYPES = (RE_TYPE, Regex) +except ImportError: + RE_TYPES = (RE_TYPE,) class DocumentFormatter(object): @@ -53,7 +58,7 @@ class DefaultDocumentFormatter(DocumentFormatter): return self.format_document(value) elif isinstance(value, list): return [self.transform_value(v) for v in value] - if isinstance(value, (RE_TYPE, bson.Regex)): + if isinstance(value, RE_TYPES): flags = "" if value.flags & re.IGNORECASE: flags += "i"
Fix import of bson.Regex, since it doesn't exist until pymongo <I>.
py
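The guarded import above is a reusable pattern for optional types: collect whatever is importable into a tuple and hand that to isinstance. A sketch (the bson.regex module is only importable on sufficiently new pymongo releases, per the commit message):

import re

RE_TYPE = type(re.compile(""))
try:
    from bson.regex import Regex       # absent on older pymongo
    RE_TYPES = (RE_TYPE, Regex)
except ImportError:
    RE_TYPES = (RE_TYPE,)

def is_regex(value):
    # isinstance accepts a tuple of types, so one call covers both kinds
    return isinstance(value, RE_TYPES)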
diff --git a/ubersmith/calls/client.py b/ubersmith/calls/client.py index <HASH>..<HASH> 100644 --- a/ubersmith/calls/client.py +++ b/ubersmith/calls/client.py @@ -12,6 +12,8 @@ from ubersmith.utils import prepend_base __all__ = [ 'GetCall', 'ListCall', + 'PaymentMethodListCall', + 'InvoiceCountCall', ] _ = prepend_base(__name__.split('.')[-1]) @@ -50,6 +52,10 @@ class ListCall(_ClientCallMixin, GroupCall): method = _('list') +class PaymentMethodListCall(GroupCall): + method = _('payment_method_list') + + class InvoiceCountCall(BaseCall): method = _('invoice_count') required_fields = ['client_id']
Added payment_method_list cleaner
py
diff --git a/python/ray/autoscaler/command_runner.py b/python/ray/autoscaler/command_runner.py index <HASH>..<HASH> 100644 --- a/python/ray/autoscaler/command_runner.py +++ b/python/ray/autoscaler/command_runner.py @@ -659,10 +659,11 @@ class DockerCommandRunner(CommandRunnerInterface): self.container_name) def _check_docker_installed(self): - try: - self.ssh_command_runner.run("command -v docker") - return - except Exception: + no_exist = "NoExist" + output = self.ssh_command_runner.run( + f"command -v docker || echo '{no_exist}'", with_output=True) + cleaned_output = output.decode().strip() + if no_exist in cleaned_output or "docker" not in cleaned_output: install_commands = [ "curl -fsSL https://get.docker.com -o get-docker.sh", "sudo sh get-docker.sh", "sudo usermod -aG docker $USER",
[Autoscaler] Actually try to catch when docker does not exist (#<I>)
py
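The probe above works because `command -v docker || echo 'NoExist'` always exits 0, so the caller inspects the output rather than relying on a raised exception. A rough local equivalent with subprocess (plain shell instead of the SSH runner, so details differ):

import subprocess

def docker_installed():
    sentinel = "NoExist"
    out = subprocess.run(
        "command -v docker || echo '%s'" % sentinel,
        shell=True, capture_output=True, text=True,
    ).stdout.strip()
    return sentinel not in out and "docker" in out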
diff --git a/osuapi/__init__.py b/osuapi/__init__.py index <HASH>..<HASH> 100644 --- a/osuapi/__init__.py +++ b/osuapi/__init__.py @@ -2,7 +2,7 @@ __title__ = "osuapi" __author__ = "khazhyk" __license__ = "MIT" __copyright__ = "Copyright khazhyk" -__version__ = "0.0.37" +__version__ = "0.0.38" from .osu import OsuApi from .connectors import *
<I> support count_normal, count_slider, count_spinner, download_unavailable, audio_unavailable
py
diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py index <HASH>..<HASH> 100644 --- a/salt/utils/cloud.py +++ b/salt/utils/cloud.py @@ -423,6 +423,9 @@ def bootstrap(vm_, opts): 'win_installer', vm_, opts ) if win_installer: + deploy_kwargs['port'] = salt.config.get_cloud_config_value( + 'smb_port', vm_, opts, default=445 + ) deploy_kwargs['win_installer'] = win_installer minion = salt.utils.cloud.minion_config(opts, vm_) deploy_kwargs['master'] = minion['master']
Fix Windows bootstrapping; this is a regression bug.
py
diff --git a/cerberus/cerberus.py b/cerberus/cerberus.py index <HASH>..<HASH> 100644 --- a/cerberus/cerberus.py +++ b/cerberus/cerberus.py @@ -285,10 +285,7 @@ class Validator(object): error = errors.ValidationError(document_path, schema_path, code, rule, constraint, value, info) - self._errors.append(error) - self._errors.sort() - self.document_error_tree += error - self.schema_error_tree += error + self._error([error]) def __get_child_validator(self, document_crumb=None, schema_crumb=None, **kwargs):
Reduces boilerplate for submitting errors
py
diff --git a/tools/make_saml_metadata.py b/tools/make_saml_metadata.py index <HASH>..<HASH> 100644 --- a/tools/make_saml_metadata.py +++ b/tools/make_saml_metadata.py @@ -30,7 +30,7 @@ parser.add_argument('-s', dest='sign', action='store_true', help="sign the metad parser.add_argument('-x', dest='xmlsec', help="xmlsec binaries to be used for the signing") parser.add_argument('-f', dest="frontend", help='generate frontend metadata', action="store_true") parser.add_argument('-b', dest="backend", help='generate backend metadata', action="store_true") -parser.add_argument('-o', dest="output", help='output path') +parser.add_argument('-o', dest="output", default=".", help='output path') parser.add_argument(dest="config", nargs="+") args = parser.parse_args()
Write output to the working directory by default.
py
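For reference, `default` on an argparse option behaves exactly as this one-word diff assumes; a minimal sketch:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-o', dest='output', default='.', help='output path')

assert parser.parse_args([]).output == '.'            # no -o: working dir
assert parser.parse_args(['-o', '/tmp']).output == '/tmp'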
diff --git a/raiden/api/python.py b/raiden/api/python.py index <HASH>..<HASH> 100644 --- a/raiden/api/python.py +++ b/raiden/api/python.py @@ -232,8 +232,17 @@ class RaidenAPI(object): if not isaddress(partner_address): raise InvalidAddress('Expected binary address format for partner in channel deposit') - graph = self.raiden.token_to_channelgraph[token_address] - channel = graph.partneraddress_to_channel[partner_address] + graph = self.raiden.token_to_channelgraph.get(token_address) + if graph is None: + raise InvalidAddress('Unknown token address') + + channel = graph.partneraddress_to_channel.get(partner_address) + if channel is None: + raise InvalidAddress('No channel with partner_address for the given token') + + if channel.token_address != token_address: + raise InvalidAddress('token_address does not match the netting channel attribute') + token = self.raiden.chain.token(token_address) netcontract_address = channel.external_state.netting_channel.address old_balance = channel.contract_balance
Extended checks for the Python API
py
diff --git a/openquake/engine/job/validation.py b/openquake/engine/job/validation.py index <HASH>..<HASH> 100644 --- a/openquake/engine/job/validation.py +++ b/openquake/engine/job/validation.py @@ -557,6 +557,7 @@ class ClassicalBCRRiskForm(BaseOQModelForm): calc_mode = 'classical_bcr' class Meta: + model = models.RiskCalculation fields = ( 'description', 'no_progress_timeout', @@ -572,6 +573,7 @@ class EventBasedBCRRiskForm(BaseOQModelForm): calc_mode = 'event_based_bcr' class Meta: + model = models.RiskCalculation fields = ( 'description', 'no_progress_timeout', @@ -589,6 +591,7 @@ class EventBasedRiskForm(BaseOQModelForm): calc_mode = 'event_based' class Meta: + model = models.RiskCalculation fields = ( 'description', 'no_progress_timeout', @@ -626,6 +629,7 @@ class ScenarioDamageRiskForm(BaseOQModelForm): calc_mode = 'scenario_damage' class Meta: + model = models.RiskCalculation fields = ( 'description', 'region_constraint', @@ -637,6 +641,7 @@ class ScenarioRiskForm(BaseOQModelForm): calc_mode = 'scenario' class Meta: + model = models.RiskCalculation fields = ( 'description', 'no_progress_timeout',
Added reference to risk calculation model. Former-commit-id: dcb<I>ede<I>f0e<I>c7d<I>f<I>c<I>b<I>daeff
py
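Background for the fix: a Django ModelForm's inner Meta must name its model; with only a fields tuple, Django refuses to build the form at runtime. A hedged sketch of the required shape (the import path and field names are illustrative):

from django import forms

from openquake.engine.db import models  # hypothetical import path

class ScenarioRiskForm(forms.ModelForm):
    class Meta:
        model = models.RiskCalculation   # without this line, instantiating
                                         # the form raises at runtime
        fields = ('description', 'region_constraint')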
diff --git a/validator/__init__.py b/validator/__init__.py index <HASH>..<HASH> 100644 --- a/validator/__init__.py +++ b/validator/__init__.py @@ -576,14 +576,14 @@ def validate(validation, dictionary): if isinstance(validation[key], (list, tuple)): if Required in validation[key]: if not Required(key, dictionary): - errors[key] = "must be present" + errors[key] = ["must be present"] continue _validate_list_helper(validation, dictionary, key, errors) else: v = validation[key] if v == Required: if not Required(key, dictionary): - errors[key] = "must be present" + errors[key] = ["must be present"] else: _validate_and_store_errs(v, dictionary, key, errors) if len(errors) > 0:
return "must be present" within an array
py
diff --git a/oath/_hotp.py b/oath/_hotp.py index <HASH>..<HASH> 100644 --- a/oath/_hotp.py +++ b/oath/_hotp.py @@ -22,7 +22,9 @@ See also http://tools.ietf.org/html/rfc4226 __all__ = ( 'hotp', 'accept_hotp' ) def truncated_value(h): - offset = ord(h[-1]) & 0xF + v = h[-1] + if not isinstance(v, int): v = ord(v) # Python 2.x + offset = v & 0xF (value,) = struct.unpack('>I', h[offset:offset + 4]) return value & 0x7FFFFFFF
hotp: in Python 3, the secret will be a bytes instance and we can just skip calling ord
py
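The ord() guard reflects a general Python 2/3 difference: indexing a bytes object yields an int on 3 and a one-character str on 2. A self-contained sketch of the same dynamic truncation (RFC 4226), runnable on either:

import hmac
import hashlib
import struct

def truncated_value(h):
    v = h[-1]
    if not isinstance(v, int):
        v = ord(v)                       # Python 2: bytes index is a str
    offset = v & 0xF
    (value,) = struct.unpack('>I', h[offset:offset + 4])
    return value & 0x7FFFFFFF

digest = hmac.new(b'secret', struct.pack('>Q', 0), hashlib.sha1).digest()
print(truncated_value(digest) % 10 ** 6)  # a 6-digit HOTP-style code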
diff --git a/sqlalchemy_hana/dialect.py b/sqlalchemy_hana/dialect.py index <HASH>..<HASH> 100644 --- a/sqlalchemy_hana/dialect.py +++ b/sqlalchemy_hana/dialect.py @@ -309,7 +309,7 @@ class HANABaseDialect(default.DefaultDialect): result = connection.execute( sql.text( - "SELECT TABLE_NAME, IS_TEMPORARY FROM TABLES WHERE SCHEMA_NAME=:schema", + "SELECT TABLE_NAME, IS_TEMPORARY FROM TABLES WHERE SCHEMA_NAME=:schema AND IS_USER_DEFINED_TYPE='FALSE'", ).bindparams( schema=self.denormalize_name(schema), )
Ignore user defined tables types in get_table_names (#<I>)
py
diff --git a/omxplayer/player.py b/omxplayer/player.py index <HASH>..<HASH> 100644 --- a/omxplayer/player.py +++ b/omxplayer/player.py @@ -436,7 +436,7 @@ class OMXPlayer(object): Returns: str: filename currently playing """ - return self._filename + return self._filename # MediaPlayer2.Player types:
Fix indentation (again!)
py
diff --git a/salt/modules/mount.py b/salt/modules/mount.py index <HASH>..<HASH> 100644 --- a/salt/modules/mount.py +++ b/salt/modules/mount.py @@ -418,7 +418,7 @@ def vfstab(config='/etc/vfstab'): salt '*' mount.vfstab ''' - ## NOTE: vfstab is a wrapper, we share all code with fstab + ## NOTE: vfstab is a wrapper for fstab return fstab(config) @@ -427,9 +427,6 @@ def rm_fstab(name, device, config='/etc/fstab'): .. versionchanged:: 2016.3.2 Remove the mount point from the fstab - config : string - optional path of fstab - CLI Example: .. code-block:: bash @@ -480,16 +477,13 @@ def rm_vfstab(name, device, config='/etc/vfstab'): .. versionadded:: 2016.3.2 Remove the mount point from the vfstab - config : string - optional path of vfstab - CLI Example: .. code-block:: bash salt '*' mount.rm_vfstab /mnt/foo /device/c0t0d0p0 ''' - ## NOTE: rm_vfstab is a wrapper, we share all code with fstab + ## NOTE: rm_vfstab is a wrapper for rm_fstab return rm_fstab(name, device, config)
actually do the cleanup, oops
py