diff
stringlengths
139
3.65k
message
stringlengths
8
627
diff_languages
stringclasses
1 value
diff --git a/src/interface/cli.py b/src/interface/cli.py index <HASH>..<HASH> 100644 --- a/src/interface/cli.py +++ b/src/interface/cli.py @@ -38,8 +38,7 @@ TODO: None """ -import sys -import getopt +import sys, copy, getopt DEFAULT_TYPE = str
fixed missing include for copy module in cli.py
py
diff --git a/geoplot/geoplot.py b/geoplot/geoplot.py index <HASH>..<HASH> 100644 --- a/geoplot/geoplot.py +++ b/geoplot/geoplot.py @@ -1317,14 +1317,14 @@ def kdeplot( if self.projection: sns.kdeplot( - pd.Series([p.x for p in self.df.geometry]), - pd.Series([p.y for p in self.df.geometry]), + x=pd.Series([p.x for p in self.df.geometry]), + y=pd.Series([p.y for p in self.df.geometry]), transform=ccrs.PlateCarree(), ax=ax, cmap=self.cmap, **self.kwargs ) else: sns.kdeplot( - pd.Series([p.x for p in self.df.geometry]), - pd.Series([p.y for p in self.df.geometry]), + x=pd.Series([p.x for p in self.df.geometry]), + y=pd.Series([p.y for p in self.df.geometry]), ax=ax, cmap=self.cmap, **self.kwargs ) return ax
Set explicit x/y params in KDEPlot (#<I>)
py
diff --git a/nanoplot/NanoPlot.py b/nanoplot/NanoPlot.py index <HASH>..<HASH> 100755 --- a/nanoplot/NanoPlot.py +++ b/nanoplot/NanoPlot.py @@ -26,7 +26,7 @@ import pysam import nanoget import nanoplotter import nanomath -__version__="0.9.2" +__version__="0.9.3" def main(): @@ -156,10 +156,12 @@ def filterData(datadf, args): logging.info("Removing length outliers for plotting.") if args.maxlength: datadf=datadf[datadf[readlengthsPointer] < args.maxlength] - lengthprefix.append("MaxLength" + str(args.maxlength) + '_') + lengthprefix.append("MaxLength-" + str(args.maxlength) + '_') logging.info("Removing reads longer than {}.".format(str(args.maxlength))) + if args.loglength: - datadf[readlengthsPointer] = np.log10(datadf[readlengthsPointer]) + datadf["log_" + readlengthsPointer] = np.log10(datadf[readlengthsPointer]) + readlengthsPointer = "log_" + readlengthsPointer lengthprefix.append("Log_") logging.info("Using Log10 scaled read lengths.") logBool = True
fixed mistake in log plot readlengthPointer
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -13,7 +13,7 @@ setup( requirements=[ 'pygooglechart', 'django-paging', - 'django-indexer', + 'django-indexer==0.2', ], include_package_data=True, classifiers=[
Update version for indexer in reqs
py
diff --git a/tests/test_pfs_client.py b/tests/test_pfs_client.py index <HASH>..<HASH> 100644 --- a/tests/test_pfs_client.py +++ b/tests/test_pfs_client.py @@ -462,7 +462,8 @@ def test_flush_commit(pfs_client): with pfs_client.commit('test-repo-1', 'master') as c: pfs_client.put_file_bytes(c, 'input.json', b'hello world') - pfs_client.flush_commit(['test-repo-1/{}'.format(c.id)]) + # Just block until all of the commits are yielded + list(pfs_client.flush_commit(['test-repo-1/{}'.format(c.id)])) files = pfs_client.get_files('test-repo-1/master', '/', recursive=True) assert files == {'/input.json': b'hello world'}
Updated test to use new generator-based `flush_commit`
py
diff --git a/test/test_dedupe.py b/test/test_dedupe.py index <HASH>..<HASH> 100644 --- a/test/test_dedupe.py +++ b/test/test_dedupe.py @@ -156,7 +156,8 @@ class PredicatesTest(unittest.TestCase): assert dedupe.predicates.nearIntegersPredicate(field) == (15, 16, 17, 122, 123, 124) assert dedupe.predicates.commonFourGram(field) == ('123 ', '23 1', '3 16', ' 16t', '16th', '6th ', 'th s', 'h st') assert dedupe.predicates.commonSixGram(field) == ('123 16', '23 16t', '3 16th', ' 16th ', '16th s', '6th st') - + assert dedupe.predicates.initials(field,12) == () + assert dedupe.predicates.initials(field,7) == ('123 16t',) if __name__ == "__main__": unittest.main()
Add test for 'initials' predicate
py
diff --git a/phoebe/algorithms/marching.py b/phoebe/algorithms/marching.py index <HASH>..<HASH> 100644 --- a/phoebe/algorithms/marching.py +++ b/phoebe/algorithms/marching.py @@ -887,7 +887,7 @@ def discretize(delta=0.1, max_triangles=None, potential='BinaryRoche', *args): omega[i] = (atan2(zeta2, eta2)-atan2(zeta1, eta1)) % (2*pi) #~ logger.debug("%d: r[%d]=(% 3.3f, % 3.3f, % 3.3f), r[%d]=(% 3.3f, % 3.3f, % 3.3f), front angle=% 3.3f", i, i-1 if i > 0 else len(P)-1, xi1, eta1, zeta1, i+1 if i < len(P)-1 else 0, xi2, eta2, zeta2, omega[i]/pi*180) - if np.allclose(omega, omega[0]): + if np.allclose(omega, omega[0], atol=1e-5): minidx = 0 else: minidx = omega.argmin()
Half-fixed! The primary star mesh is now identical, but the secondary isn't. I suspect it's np.allclose() tolerance. Trying 1e-5.
py
diff --git a/indra/assemblers/html/assembler.py b/indra/assemblers/html/assembler.py index <HASH>..<HASH> 100644 --- a/indra/assemblers/html/assembler.py +++ b/indra/assemblers/html/assembler.py @@ -74,7 +74,8 @@ class HtmlAssembler(object): rest_api_results=self.rest_api_results) return self.model - def format_evidence_text(self, stmt): + @staticmethod + def format_evidence_text(stmt): """Returns evidence metadata with highlighted evidence text. Parameters
Make format_evidence_text into staticmethod
py
diff --git a/lambda_decorators.py b/lambda_decorators.py index <HASH>..<HASH> 100644 --- a/lambda_decorators.py +++ b/lambda_decorators.py @@ -265,14 +265,14 @@ def after(func): >>> # to create a reusable decorator >>> @after - ... def teapot(retval): - ... retval['statusCode'] = 418 + ... def gnu_terry_pratchett(retval): + ... retval.setdefault('Headers', {})['X-Clacks-Overhead'] = 'GNU Terry Pratchett' ... return retval - >>> @teapot + >>> @gnu_terry_pratchett ... def handler(event, context): - ... return {} + ... return {'body': ''} >>> handler({}, object()) - {'statusCode': 418} + {'body': '', 'Headers': {'X-Clacks-Overhead': 'GNU Terry Pratchett'}} """ class AfterDecorator(LambdaDecorator): def after(self, retval):
gnu terry pratchet, why not
py
diff --git a/safe/utilities/i18n.py b/safe/utilities/i18n.py index <HASH>..<HASH> 100644 --- a/safe/utilities/i18n.py +++ b/safe/utilities/i18n.py @@ -8,6 +8,7 @@ __author__ = 'timlinux' def tr(text): """We define a tr() alias here since the utilities implementation below is not a class and does not inherit from QObject. + .. note:: see http://tinyurl.com/pyqt-differences :param text: String to be translated @@ -17,6 +18,8 @@ def tr(text): the original string. :rtype: str """ + # Ensure that the text is a string + text = str(text) # noinspection PyCallByClass,PyTypeChecker,PyArgumentList return QCoreApplication.translate('@default', text)
Ensure that the text input to our tr is a string.
py
diff --git a/demcoreg/dem_align.py b/demcoreg/dem_align.py index <HASH>..<HASH> 100755 --- a/demcoreg/dem_align.py +++ b/demcoreg/dem_align.py @@ -181,9 +181,9 @@ def getparser(): parser.add_argument('-mask_list', nargs='+', type=str, default=['glaciers',], choices=dem_mask.mask_choices, \ help='Define masks to use to limit reference surfaces for co-registration') parser.add_argument('-tiltcorr', action='store_true', \ - help='After preliminary translation, fit 2D polynomial to residual elevation offsets and remove') + help='After preliminary translation, fit polynomial to residual elevation offsets and remove') parser.add_argument('-polyorder', type=int, default=1, \ - help='Specify order of 2D polynomial fit') + help='Specify order of polynomial fit') parser.add_argument('-tol', type=float, default=0.02, \ help='When iterative translation magnitude is below this tolerance (meters), break and write out corrected DEM') parser.add_argument('-max_offset', type=float, default=100, \
dem_align: update usage for polynomial fits
py
diff --git a/traces/timeseries.py b/traces/timeseries.py index <HASH>..<HASH> 100644 --- a/traces/timeseries.py +++ b/traces/timeseries.py @@ -505,6 +505,7 @@ class TimeSeries(object): period_time = sampling_period temp = deepcopy(self) + temp.default = EXTEND_BACK temp.domain = Domain(self.domain.start() - buffer_time, self.domain.end() + buffer_time)
setting default of temp timeseries in moving average
py
diff --git a/tests/test_jobs.py b/tests/test_jobs.py index <HASH>..<HASH> 100644 --- a/tests/test_jobs.py +++ b/tests/test_jobs.py @@ -131,7 +131,7 @@ def test_long_error_stack(): # create long error stack STACK_SIZE = 89942 # Does not fit into small blob (should be 64k, but found to be higher) long_error_stack = ''.join(random.choice(string.ascii_letters) for _ in range(STACK_SIZE)) - assert_true(subjects) + assert subjects table_name = 'fake_table' key = subjects.fetch('KEY')[0]
Update tests/test_jobs.py
py
diff --git a/pymemcache/test/test_client_hash.py b/pymemcache/test/test_client_hash.py index <HASH>..<HASH> 100644 --- a/pymemcache/test/test_client_hash.py +++ b/pymemcache/test/test_client_hash.py @@ -137,10 +137,11 @@ class TestHashClient(ClientTestMixin, unittest.TestCase): client._get_client = get_clients - result = client.set(b'key1', b'value1', noreply=False) - result = client.set(b'key3', b'value2', noreply=False) + assert client.set(b'key1', b'value1', noreply=False) is True + assert client.set(b'key3', b'value2', noreply=False) is True result = client.gets_many([b'key1', b'key3']) - assert result == {b'key1': (b'value1', b'1'), b'key3': (b'value2', b'1')} + assert (result == + {b'key1': (b'value1', b'1'), b'key3': (b'value2', b'1')}) def test_no_servers_left(self): from pymemcache.client.hash import HashClient
Introduce some missing .set() assertions Also wrap a line that exceeds <I> characters.
py
diff --git a/stash.py b/stash.py index <HASH>..<HASH> 100755 --- a/stash.py +++ b/stash.py @@ -51,17 +51,19 @@ class StashException(Exception): class Stash(object): + PATCHES_PATH = os.path.expanduser('~/.patches') + def __init__(self): # Check if the patches path exists, and in case it does not, create it. - self.patches_path = os.path.expanduser('~/.patches') - if not os.path.exists(self.patches_path): - os.mkdir(self.patches_path) + if not os.path.exists(self.PATCHES_PATH): + os.mkdir(self.PATCHES_PATH) + + self.patches = os.listdir(self.PATCHES_PATH) - self.patches = os.listdir(self.patches_path) def _get_patch_path(self, patch_name): """Returns the absolute path for patch *patch_name*.""" - return os.path.join(self.patches_path, patch_name) if patch_name else None + return os.path.join(self.PATCHES_PATH, patch_name) if patch_name else None def list_patches(self): """Prints a list of all patches present in the current stash."""
Make a class constant out of the patches path. Storing the patches path in a class constant allows us to easily override the location where patches are stored in unit tests.
py
diff --git a/test/test_zotero.py b/test/test_zotero.py index <HASH>..<HASH> 100644 --- a/test/test_zotero.py +++ b/test/test_zotero.py @@ -35,7 +35,10 @@ import time import httpretty from dateutil import parser from httpretty import HTTPretty -from pyzotero.pyzotero import zotero as z +try: + from pyzotero.pyzotero import zotero as z +except ModuleNotFoundError: + from pyzotero import zotero as z # Python 3 compatibility faffing try:
Don't throw an import error if tests are being run in "test" dir
py
diff --git a/test/test_types_values.py b/test/test_types_values.py index <HASH>..<HASH> 100644 --- a/test/test_types_values.py +++ b/test/test_types_values.py @@ -352,9 +352,9 @@ class ConstantsTest(ClangTest): #define B 1.0 #define C 0.8249 """) - self.failUnlessAlmostEqual(self.namespace.A, 0.9642) - self.failUnlessAlmostEqual(self.namespace.B, 1.0) - self.failUnlessAlmostEqual(self.namespace.C, 0.8249) + self.assertAlmostEqual(self.namespace.A, 0.9642) + self.assertAlmostEqual(self.namespace.B, 1.0) + self.assertAlmostEqual(self.namespace.C, 0.8249) def test_anonymous_struct(self): flags = ['-target', 'i386-linux']
remove Deprecated assertEquals
py
diff --git a/realtime/shake_event.py b/realtime/shake_event.py index <HASH>..<HASH> 100644 --- a/realtime/shake_event.py +++ b/realtime/shake_event.py @@ -813,7 +813,8 @@ class ShakeEvent(QObject): None """ - myRGBList = ['#FFFFFF', '#209fff', '#00cfff', '#55ffff', '#aaffff', + myRGBList = ['#FFFFFF', '#FFFFFF', '#209fff', '#00cfff', '#55ffff', + '#aaffff', '#fff000', '#ffa800', '#ff7000', '#ff0000', '#D00', '#800', '#400'] myRGB = myRGBList[int(theMMIValue)]
Fix off by one issue in cities table colouring
py
diff --git a/unleash/plugin.py b/unleash/plugin.py index <HASH>..<HASH> 100644 --- a/unleash/plugin.py +++ b/unleash/plugin.py @@ -4,6 +4,7 @@ from pluginbase import PluginBase from . import plugins from .depgraph import DependencyGraph +from .exc import InvocationError plugin_base = PluginBase(package='unleash.plugins') @@ -43,6 +44,11 @@ class PluginGraph(DependencyGraph): rvs = [] for plugin_name in self.resolve_order(): + if not plugin_name in self.plugin_mods: + raise InvocationError( + 'Could not find plugin {}, which is required by {}' + .format(plugin_name, self.get_dependants(plugin_name))) + plugin = self.plugin_mods[plugin_name] func = getattr(plugin, funcname, None)
If a plugin dependency is missing, raise an InvocationError.
py
diff --git a/tests/test_backend.py b/tests/test_backend.py index <HASH>..<HASH> 100644 --- a/tests/test_backend.py +++ b/tests/test_backend.py @@ -33,11 +33,11 @@ def test_render_missing_file(quiet, engine='dot', format_='pdf'): @pytest.exe def test_render(capsys, tmpdir, engine='dot', format_='pdf', filename='hello.gv', data=b'digraph { hello -> world }'): - source = tmpdir.join(filename) - source.write(data) - rendered = source.new(ext='%s.%s' % (source.ext, format_)) + lpath = tmpdir.join(filename) + lpath.write(data) + rendered = lpath.new(ext='%s.%s' % (lpath.ext, format_)) - assert render(engine, format_, str(source)) == str(rendered) + assert render(engine, format_, str(lpath)) == str(rendered) assert rendered.size() assert capsys.readouterr() == ('', '')
use lpath for LocalPath
py
diff --git a/json5/lib.py b/json5/lib.py index <HASH>..<HASH> 100644 --- a/json5/lib.py +++ b/json5/lib.py @@ -231,27 +231,38 @@ def _is_reserved_word(k): global _reserved_word_re if _reserved_word_re is None: + # List taken from section 7.6.1 of ECMA-262. _reserved_word_re = re.compile('|'.join([ 'break', 'case', 'catch', + 'class', + 'const', 'continue', 'debugger', 'default', 'delete', 'do', 'else', + 'enum', + 'export', + 'extends', + 'false', 'finally', 'for', 'function', 'if', + 'import', 'in', 'instanceof', 'new', + 'null', 'return', + 'super', 'switch', 'this', 'throw', + 'true', 'try', 'typeof', 'var',
add future reserved words and null/false/true to reserved words for idenitifiers
py
diff --git a/lastmatch.py b/lastmatch.py index <HASH>..<HASH> 100755 --- a/lastmatch.py +++ b/lastmatch.py @@ -11,6 +11,14 @@ either Gstreamer (and its Python bindings) or pymad installed. """ import sys import os + +# Just a little trickery to avoid importing the "lastfp" package that's +# in the source distribution, because it won't contain the built +# _fplib.so extension module. We need to import from the built verison, +# and this script is likely to be run from the distribution root. +for path in '', os.path.abspath(os.path.dirname(__file__)): + if path in sys.path: + sys.path.remove(path) import lastfp # This API key is specifically for this script, lastmatch.py. If you
trick to prevent importing of wrong lastfp package
py
diff --git a/saltcloud/cloud.py b/saltcloud/cloud.py index <HASH>..<HASH> 100644 --- a/saltcloud/cloud.py +++ b/saltcloud/cloud.py @@ -223,6 +223,20 @@ class Cloud(object): ) return + deploy = vm_.get( + 'deploy', self.opts.get( + '{0}.deploy'.format(self.provider(vm_).upper()), + self.opts.get('deploy') + ) + ) + + if deploy is True and 'master' not in vm_['minion']: + raise ValueError( + 'There\'s no master defined in the {0} VM settings'.format( + vm_['name'] + ) + ) + priv, pub = saltcloud.utils.gen_keys( saltcloud.utils.get_option('keysize', self.opts, vm_) ) @@ -535,7 +549,7 @@ class Map(Cloud): def create_multiprocessing(config): """ - This function will be called from another process when running a map in + This function will be called from another process when running a map in parallel mode. The result from the create is always a json object. """ config['opts']['output'] = 'json'
Fail as soon as we realize we're deploying and master was not specified anywhere in the configuration.
py
diff --git a/x10_any/_version.py b/x10_any/_version.py index <HASH>..<HASH> 100644 --- a/x10_any/_version.py +++ b/x10_any/_version.py @@ -1,2 +1,2 @@ -version_tuple = __version_info__ = (0, 0, 9) +version_tuple = __version_info__ = (0, 0, 9, 'git') version = version_string = __version__ = '.'.join(map(str, __version_info__))
Set version string to indicate its from git checkout
py
diff --git a/lwr/manager_factory.py b/lwr/manager_factory.py index <HASH>..<HASH> 100644 --- a/lwr/manager_factory.py +++ b/lwr/manager_factory.py @@ -28,7 +28,7 @@ def build_managers(app, conf): managers = {} if not job_managers_config: - managers[DEFAULT_MANAGER_NAME] = _build_manager(QueueManager, app) + managers[DEFAULT_MANAGER_NAME] = _build_manager(QueueManager, app, DEFAULT_MANAGER_NAME, default_options) else: config = ConfigParser() config.readfp(open(job_managers_config))
Fix for applying assign_ids property to default job manager.
py
diff --git a/src/front-door/azext_front_door/custom.py b/src/front-door/azext_front_door/custom.py index <HASH>..<HASH> 100644 --- a/src/front-door/azext_front_door/custom.py +++ b/src/front-door/azext_front_door/custom.py @@ -285,7 +285,7 @@ def configure_fd_frontend_endpoint_disable_https(cmd, resource_group_name, front def configure_fd_frontend_endpoint_enable_https(cmd, resource_group_name, front_door_name, item_name, secret_name=None, secret_version=None, - certificate_source=None, vault_id=None): + certificate_source='FrontDoor', vault_id=None): keyvault_usage = ('usage error: --certificate-source AzureKeyVault --vault-id ID ' '--secret-name NAME --secret-version VERSION') if certificate_source != 'AzureKeyVault' and any([vault_id, secret_name, secret_version]):
Making 'FrontDoor' Default for certificate source (#<I>)
py
diff --git a/djangocms_page_meta/__init__.py b/djangocms_page_meta/__init__.py index <HASH>..<HASH> 100644 --- a/djangocms_page_meta/__init__.py +++ b/djangocms_page_meta/__init__.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals -__version__ = '0.8.2' +__version__ = '0.8.2.post1' __author__ = 'Iacopo Spalletti <[email protected]>' default_app_config = 'djangocms_page_meta.apps.PageMetaConfig'
Bump develop version [ci skip]
py
diff --git a/ocrd/ocrd/resource_manager.py b/ocrd/ocrd/resource_manager.py index <HASH>..<HASH> 100644 --- a/ocrd/ocrd/resource_manager.py +++ b/ocrd/ocrd/resource_manager.py @@ -137,7 +137,7 @@ class OcrdResourceManager(): def location_to_resource_dir(self, location): return '/usr/local/share/ocrd-resources' if location == 'system' else \ join(XDG_DATA_HOME, 'ocrd-resources') if location == 'data' else \ - getcwd() + join(getcwd(), 'ocrd-resources') def resource_dir_to_location(self, resource_path): resource_path = str(resource_path)
:bug: resmgr: --location cwd should still include "ocrd-resources"
py
diff --git a/falafel/config/specs.py b/falafel/config/specs.py index <HASH>..<HASH> 100644 --- a/falafel/config/specs.py +++ b/falafel/config/specs.py @@ -121,6 +121,7 @@ static_specs = { "ntpq_pn" : CommandSpec("/usr/sbin/ntpq -pn"), "ovirt_engine_confd" : PatternSpec(r"etc/ovirt-engine/engine\.conf\.d/.*"), "ovs-vsctl_show" : CommandSpec("/usr/bin/ovs-vsctl show"), + "parted_-l" : CommandSpec("/sbin/parted -l"), "password-auth" : SimpleFileSpec("etc/pam.d/password-auth"), "ps_aux" : CommandSpec("/bin/ps aux"), "ps_auxcww" : CommandSpec("/bin/ps auxcww"),
Add "parted_-l" to the spec
py
diff --git a/glue/pipeline.py b/glue/pipeline.py index <HASH>..<HASH> 100644 --- a/glue/pipeline.py +++ b/glue/pipeline.py @@ -1229,7 +1229,8 @@ class CondorDAG: if not self.__dag_file_path: raise CondorDAGError, "No path for DAG file" try: - outfilename = self.__dag_file_path.replace(".dag", ".sh") + dfp = self.__dag_file_path + outfilename = ".".join(dfp.split(".")[:-1]) outfile = open(outfilename, "w") except: raise CondorDAGError, "Cannot open file " + self.__dag_file_path
Fixed the write_script() method for dax
py
diff --git a/squad/http.py b/squad/http.py index <HASH>..<HASH> 100644 --- a/squad/http.py +++ b/squad/http.py @@ -53,7 +53,7 @@ def auth(func, mode=AuthMode.READ): group = get_object_or_404(models.Group, slug=group_slug) request.group = group - user = request.user + user = auth_user_from_request(request, request.user) if len(args) < 3: # no project, authenticate against group only @@ -66,8 +66,6 @@ def auth(func, mode=AuthMode.READ): project = get_object_or_404(group.projects, slug=project_slug) request.project = project - user = auth_user_from_request(request, user) - if not (project.is_public or user.is_authenticated): raise PermissionDenied()
http: authenticate users before checking permissions Authentication with tokens is now performed before checking group and project permissions. Since auth_user_from_request passes AnonymousUser back if no authentication can be done this pattch will not change the way we deal with anonoymous requests. It fixes issues with access to private groups.
py
diff --git a/ariadne/asgi.py b/ariadne/asgi.py index <HASH>..<HASH> 100644 --- a/ariadne/asgi.py +++ b/ariadne/asgi.py @@ -58,9 +58,9 @@ class GraphQL: async def handle_http(self, receive: Receive, send: Send, *, scope: Scope): request = Request(scope=scope, receive=receive) - if request.method == "GET" and not request.query_params.get("query"): + if request.method == "GET": response = await self.render_playground(request) - elif request.method in {"GET", "POST"}: + elif request.method == "POST": response = await self.graphql_http_server(request) else: response = Response(status_code=400)
Temporarily disable query execution over GET
py
diff --git a/sendgrid/helpers/mail/open_tracking.py b/sendgrid/helpers/mail/open_tracking.py index <HASH>..<HASH> 100644 --- a/sendgrid/helpers/mail/open_tracking.py +++ b/sendgrid/helpers/mail/open_tracking.py @@ -11,7 +11,7 @@ class OpenTracking(object): :param enable: If open tracking is enabled. :type enable: boolean, optional :param substitution_tag: Tag in body to be replaced by tracking pixel. - :type substitution_tag: string, optional + :type substitution_tag: OpenTrackingSubstitionTag, optional """ self._enable = None self._substitution_tag = None
Update substitution_tag requirements The notes state that type text is required for substition_tag, but it requires a type OpenTrackingSubstitutionTag
py
diff --git a/scout/server/blueprints/variants/controllers.py b/scout/server/blueprints/variants/controllers.py index <HASH>..<HASH> 100644 --- a/scout/server/blueprints/variants/controllers.py +++ b/scout/server/blueprints/variants/controllers.py @@ -471,17 +471,18 @@ def hide_compounds_query(store, variant_obj, query_form): ) for item in compound_mirror_lt_items: - compound_item = compound_var_obj.get(item) - if compound_item is None: - LOG.debug( - "Shading %s since it has has no value for %s", - compound.get("display_name"), - item, - ) - compound["is_dismissed"] = True - continue query_form_item = query_form.get(item) if query_form_item is not None: + compound_item = compound_var_obj.get(item) + if compound_item is None: + LOG.debug( + "Shading %s since it has has no value for %s", + compound.get("display_name"), + item, + ) + compound["is_dismissed"] = True + continue + if compound_item < query_form_item: LOG.debug( "Shading %s since it has has too low value for %s",
relax the cadd check a tad
py
diff --git a/python/lowdim.py b/python/lowdim.py index <HASH>..<HASH> 100644 --- a/python/lowdim.py +++ b/python/lowdim.py @@ -112,7 +112,7 @@ if len(argsIn) > 10 : if analMode == 'mean' : resp = X.map(lambda x : dot(y,x)) -if analMode == 'standardize' : +if analMode == 'corr' : resp = X.map(lambda x : dot(y,(x-mean(x))/norm(x))) if analMode == 'regress' : yhat = dot(inv(dot(y,transpose(y))),y)
Changed name of correlation based analysis
py
diff --git a/pypresence/baseclient.py b/pypresence/baseclient.py index <HASH>..<HASH> 100644 --- a/pypresence/baseclient.py +++ b/pypresence/baseclient.py @@ -16,6 +16,7 @@ class BaseClient: pipe = kwargs.get('pipe', 0) loop = kwargs.get('loop', None) handler = kwargs.get('handler', None) + async = kwargs.get('async', False) client_id = str(client_id) if sys.platform == 'linux' or sys.platform == 'darwin':
Prep for async support Prepare for full async support?
py
diff --git a/salt/modules/ps.py b/salt/modules/ps.py index <HASH>..<HASH> 100644 --- a/salt/modules/ps.py +++ b/salt/modules/ps.py @@ -4,8 +4,23 @@ See http://code.google.com/p/psutil. ''' import time -import psutil - +try: + import psutil + has_psutil = True +except ImportError: + has_psutil = False + +def __virtual__(): + if not has_psutil: + return False + + # The python 2.6 version of psutil lacks several functions + # used in this salt module so instead of spaghetti string + # code to try to bring sanity to everything, disable it. + if sys.version_info[0] == 2 and sys.version_info[1] < 7: + return False + + return "ps" def top(num_processes=5, interval=3): '''
Disable the ps salt module on python <I> There are too many issues with it. Fixes #<I>
py
diff --git a/sos/plugins/devicemapper.py b/sos/plugins/devicemapper.py index <HASH>..<HASH> 100644 --- a/sos/plugins/devicemapper.py +++ b/sos/plugins/devicemapper.py @@ -21,6 +21,8 @@ class DeviceMapper(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): plugin_name = 'devicemapper' profiles = ('storage',) + packages = ('device-mapper',) + files = ('/dev/mapper',) def setup(self): self.add_cmd_output([
[devicemapper] add missing files and packages lists
py
diff --git a/pdf/gui/_version.py b/pdf/gui/_version.py index <HASH>..<HASH> 100644 --- a/pdf/gui/_version.py +++ b/pdf/gui/_version.py @@ -1 +1 @@ -__version__ = '1.0.0' +__version__ = '1.1.0'
Updated to version <I> Added MergeGUI
py
diff --git a/tests/smoketest.py b/tests/smoketest.py index <HASH>..<HASH> 100755 --- a/tests/smoketest.py +++ b/tests/smoketest.py @@ -47,7 +47,7 @@ run("pip install -r data-store-cli/requirements.txt") run("python -c 'import sys, hca.regenerate_api as r; r.generate_python_bindings(sys.argv[1])' swagger.json", cwd="data-store-cli") run("find data-store-cli/hca -name '*.pyc' -delete") -run("pip install --upgrade .", cwd="data-store-cli") +run("pip install --upgrade --no-deps .", cwd="data-store-cli") sample_id = str(uuid.uuid4()) bundle_dir = "data-bundle-examples/10X_v2/pbmc8k"
Smoketest: Don't auto-upgrade unpinned deps for data-store-cli This can interfere with other tests.
py
diff --git a/law/contrib/glite/__init__.py b/law/contrib/glite/__init__.py index <HASH>..<HASH> 100644 --- a/law/contrib/glite/__init__.py +++ b/law/contrib/glite/__init__.py @@ -95,7 +95,7 @@ class GLiteWorkflowProxy(WorkflowProxy): self.delegation_ids = None self.submission_data = self.submission_data_cls(tasks_per_job=self.task.tasks_per_job) self.skipped_job_nums = None - self.last_counts = len(self.job_manager.status_names) * (0,) + self.last_counts = None self.retry_counts = defaultdict(int) def requires(self): @@ -451,6 +451,8 @@ class GLiteWorkflowProxy(WorkflowProxy): # log the status line counts = (n_pending, n_running, n_finished, n_retry, n_failed) + if not self.last_counts: + self.last_counts = counts status_line = self.job_manager.status_line(counts, self.last_counts, color=True, align=4) task.publish_message(status_line)
Improve glite worklflow status line.
py
diff --git a/opentrons/instruments/pipette.py b/opentrons/instruments/pipette.py index <HASH>..<HASH> 100644 --- a/opentrons/instruments/pipette.py +++ b/opentrons/instruments/pipette.py @@ -288,12 +288,12 @@ class Pipette(Instrument): # TODO: raise warning/exception if looped back to first tip location = next(self.tip_rack_iter) else: - self.robot.warning( + self.robot.add_warning( 'pick_up_tip called with no reference to a tip') if location: placeable, _ = containers.unpack_location(location) - self.move_to(placeable.bottom(), strategy='direct', now=True) + self.move_to(placeable.bottom(), strategy='arc', now=True) self.current_tip_home_well = location @@ -321,7 +321,7 @@ class Pipette(Instrument): if location: placeable, _ = containers.unpack_location(location) - self.move_to(placeable.bottom(), strategy='direct', now=True) + self.move_to(placeable.bottom(), strategy='arc', now=True) self.plunger.move(self.positions['drop_tip']) self.plunger.home()
3 bugs found in Pipette when running on physical robot
py
diff --git a/parsl/executors/high_throughput/probe.py b/parsl/executors/high_throughput/probe.py index <HASH>..<HASH> 100644 --- a/parsl/executors/high_throughput/probe.py +++ b/parsl/executors/high_throughput/probe.py @@ -39,7 +39,7 @@ def probe_addresses(addresses, task_port, timeout=2): start_t = time.time() first_connected = None - while time.time() < start_t + timeout: + while time.time() < start_t + timeout and not first_connected: for addr in addr_map: try: recv_monitor_message(addr_map[addr]['mon_sock'], zmq.NOBLOCK)
Exit htex probe loop with first working address (#<I>) Prior to this commit, the probe will run for the entire timeout period. This results in slow worker startup if the timeout is manually changed to something much longer.
py
diff --git a/vies/fields.py b/vies/fields.py index <HASH>..<HASH> 100644 --- a/vies/fields.py +++ b/vies/fields.py @@ -16,8 +16,8 @@ class VATINField(forms.MultiValueField): forms.ChoiceField(required=False, choices=choices), forms.CharField(required=False, max_length=max_length) ) - widget = VATINWidget(choices=choices) - super(VATINField, self).__init__(widget=widget, fields=fields, *args, **kwargs) + kwargs['widget'] = VATINWidget(choices=choices) + super(VATINField, self).__init__(fields=fields, *args, **kwargs) def compress(self, data_list): if data_list:
Fix VATINField in django.contrib.admin
py
diff --git a/tests/test_80_p11_backend.py b/tests/test_80_p11_backend.py index <HASH>..<HASH> 100644 --- a/tests/test_80_p11_backend.py +++ b/tests/test_80_p11_backend.py @@ -59,6 +59,10 @@ class FakeConfig(): self.debug = False self.cert_handler_extra_class = None self.generate_cert_info = False + self.generate_cert_info = False + self.tmp_cert_file = None + self.tmp_key_file = None + self.validate_certificate = False class TestPKCS11():
Fix for EncryptedAssertion
py
diff --git a/eli5/sklearn/permutation_importance.py b/eli5/sklearn/permutation_importance.py index <HASH>..<HASH> 100644 --- a/eli5/sklearn/permutation_importance.py +++ b/eli5/sklearn/permutation_importance.py @@ -18,7 +18,7 @@ from eli5.permutation_importance import get_score_importances from eli5.sklearn.utils import pandas_available if pandas_available: - import pandas as pd + import pandas as pd # type: ignore CAVEATS_CV_NONE = """ Feature importances are computed on the same data as used for training,
Update eli5/sklearn/permutation_importance.py
py
diff --git a/starbound/sbbf02.py b/starbound/sbbf02.py index <HASH>..<HASH> 100644 --- a/starbound/sbbf02.py +++ b/starbound/sbbf02.py @@ -70,6 +70,7 @@ class FileSBBF02(filebase.File): self.header_size = None self.free_block_is_dirty = None self.free_block = None + self.num_blocks = None def get_block(self, block_index): self._stream.seek(self.header_size + self.block_size * block_index) @@ -95,6 +96,10 @@ class FileSBBF02(filebase.File): self.free_block_is_dirty = fields[2] self.free_block = fields[3] + # Calculate the number of blocks in the file. + stream.seek(0, 2) + self.num_blocks = (stream.tell() - self.header_size) // self.block_size + # Read the user header data. stream.seek(32) self._user_header = stream.read(self.header_size - 32)
Add property for total number of blocks in SBBF<I>
py
diff --git a/websocket.py b/websocket.py index <HASH>..<HASH> 100644 --- a/websocket.py +++ b/websocket.py @@ -14,9 +14,8 @@ WS_VERSION = '13' class WebSocket(object): - def __init__(self, sock, address, encoding=None): + def __init__(self, sock, encoding=None): self.sock = sock - self.address = address self.encoding = encoding self.received_close_params = None
Websocket constructor does not need an address anymore
py
diff --git a/zone_file/zone_file.py b/zone_file/zone_file.py index <HASH>..<HASH> 100644 --- a/zone_file/zone_file.py +++ b/zone_file/zone_file.py @@ -117,11 +117,14 @@ def processSOA( data, template ): soadat.append("IN") soadat.append("SOA") + soadat.append("(") for key in fields: value = str(data[key]) soadat.append( value ) + soadat.append(")") + soa_txt = " ".join(soadat) ret = ret.replace("{soa}", soa_txt) @@ -417,7 +420,9 @@ def remove_comments( text ): def flatten( text ): """ - Flatten the text: make sure each record is on one line. + Flatten the text: + * make sure each record is on one line. + * remove parenthesis """ lines = text.split("\n") @@ -545,8 +550,6 @@ def parse_line( parser, RRtok, parsed_records ): # with ttl RRtok = [RRtok[2]] + RRtok - print RRtok - rr, unmatched = parser.parse_known_args( RRtok ) assert len(unmatched) == 0, "Unmatched fields: %s" % unmatched
Include () in SOA serialization
py
diff --git a/salt/log.py b/salt/log.py index <HASH>..<HASH> 100644 --- a/salt/log.py +++ b/salt/log.py @@ -149,7 +149,7 @@ class Logging(LoggingLoggerClass): msg.decode('utf-8', 'replace'), args, exc_info, func, extra ) - except UnicodeEncodeError: + except UnicodeDecodeError: return LoggingLoggerClass.makeRecord( self, name, level, fn, lno, msg.decode('utf-8', 'ignore'),
We're decoding Unicode, not Encoding. Refs #<I>.
py
diff --git a/bin/dbs3DASAccess.py b/bin/dbs3DASAccess.py index <HASH>..<HASH> 100755 --- a/bin/dbs3DASAccess.py +++ b/bin/dbs3DASAccess.py @@ -8,6 +8,7 @@ from LifeCycleTests.LifeCycleTools.StatsClient import StatsPipeClient import os import sys import tempfile +import urllib options = get_command_line_options(__name__, sys.argv) @@ -29,7 +30,9 @@ api_call_name = das_query.keys()[0] api_call = getattr(api, api_call_name) query = das_query[api_call_name] -timing = {'stats':{'query' : str(query).replace(' ', '+'), 'api' : api_call_name}} +encoded_query = urllib.urlencode(query,doseq=True) + +timing = {'stats':{'query' : encoded_query, 'api' : api_call_name}} with TimingStat(timing, stat_client) as timer: result = api_call(**das_query[api_call_name])
Urlencode query for sqlite
py
diff --git a/firefox/src/py/firefoxlauncher.py b/firefox/src/py/firefoxlauncher.py index <HASH>..<HASH> 100644 --- a/firefox/src/py/firefoxlauncher.py +++ b/firefox/src/py/firefoxlauncher.py @@ -29,7 +29,7 @@ from webdriver_firefox.extensionconnection import ExtensionConnection from webdriver_firefox.firefox_profile import FirefoxProfile from webdriver_firefox import utils -MAX_START_ATTEMPTS = 3 +MAX_START_ATTEMPTS = 20 class FirefoxLauncher(object): """Launches the firefox browser."""
JiayaoYu:Minor fix on the firefox python driver for browsers that take longer than usual to start r<I>
py
diff --git a/djcelery/__init__.py b/djcelery/__init__.py index <HASH>..<HASH> 100644 --- a/djcelery/__init__.py +++ b/djcelery/__init__.py @@ -2,7 +2,7 @@ import os -VERSION = (2, 2, 0, "a3") +VERSION = (2, 2, 0, "a5") __version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:]) __author__ = "Ask Solem"
Bumped version to <I>a5
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -62,7 +62,7 @@ install_requires = [ 'pycrypto==2.6.1', 'pytz==2017.2', 'redis==2.10.5', - 'requests==2.14.1', + 'requests==2.14.2', 'SQLAlchemy==1.1.9', 'tornado==4.5.1', 'validate_email==1.3',
requests, up-to-date.
py
diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index <HASH>..<HASH> 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py @@ -74,7 +74,7 @@ class TestNinjoTIFFWriter(unittest.TestCase): img = FakeImage(dataset, 'L') ret = ntw.save_image(img, filename='bla.tif', compute=False) nt.save.assert_called() - assert(nt.save.mock_calls[0].kwargs['compute'] is False) + assert(nt.save.mock_calls[0][2]['compute'] is False) assert(ret == nt.save.return_value)
Fix ninjotiff writer test
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -63,7 +63,7 @@ install_requires = [ # 'pyparsing==2.0.3', # 'scipy==0.15.1', 'pybrain==0.3', - 'pug-nlp>=0.0.15', + #'pug-nlp>=0.0.15', # .{}'.format(version), ] dependency_links = ['http://github.com/hobson/pug-nlp/tarball/master#egg=pug-nlp-master'] # ['git+https://github.com/hobson/pug-nlp.git@master'] @@ -81,15 +81,18 @@ dependency_links = ['http://github.com/hobson/pug-nlp/tarball/master#egg=pug-nlp # from traceback import print_exc # print_exc() +EXCLUDE_FROM_PACKAGES = [] + print('install_requires: {}'.format(install_requires)) +packages = list(set([package_name] + list(find_packages(exclude=EXCLUDE_FROM_PACKAGES)))) +print('packages being installed: {}'.format(packages)) -EXCLUDE_FROM_PACKAGES = [] setup( name=project_name, - packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES), + packages=packages, namespace_packages=[__namespace_package__], # install non-.py files listed in MANIFEST.in (.js, .html, .txt, .md, etc)
install pug-nlp from master and print packages to be installed and add self to that packages list
py
diff --git a/openquake/commonlib/source.py b/openquake/commonlib/source.py index <HASH>..<HASH> 100644 --- a/openquake/commonlib/source.py +++ b/openquake/commonlib/source.py @@ -413,15 +413,19 @@ class CompositeSourceModel(collections.Sequence): logging.info('Creating %d realization(s) for model ' '%s, %s', len(rlzs), smodel.name, smodel.path) idx = assoc._add_realizations(idx, smodel, rlzs) - # sanity check + if assoc.realizations: if num_samples: assert len(assoc.realizations) == num_samples for rlz in assoc.realizations: rlz.weight = 1. / num_samples else: - delta = abs(sum(rlz.weight for rlz in assoc.realizations) - 1) - assert delta < 1E-12, delta + tot_weight = sum(rlz.weight for rlz in assoc.realizations) + if tot_weight < 1: + logging.warn('Some source models are not contributing, ' + 'weights are being rescaled') + for rlz in assoc.realizations: + rlz.weight = rlz.weight / tot_weight return assoc def __repr__(self):
Added weight rescaling
py
diff --git a/notario/tests/validators/test_recursive.py b/notario/tests/validators/test_recursive.py index <HASH>..<HASH> 100644 --- a/notario/tests/validators/test_recursive.py +++ b/notario/tests/validators/test_recursive.py @@ -64,6 +64,10 @@ class TestMultiSchema(object): multi = recursive.MultiSchema(*schemas) assert multi(data, []) is None + def test_fail_on_non_callable(self): + with raises(TypeError): + recursive.MultiSchema(False) + def test_pass_two_data_items(self): data = Data({'a': 2, 'b': 1}, {}).normalized() schemas = (('a', 2), ('b', 1))
a test for typeErrors in MultiSchema
py
diff --git a/test/unit/test_context.py b/test/unit/test_context.py index <HASH>..<HASH> 100644 --- a/test/unit/test_context.py +++ b/test/unit/test_context.py @@ -4,7 +4,7 @@ import unittest from dbt.contracts.graph.parsed import ParsedNode from dbt.context import parser, runtime import dbt.exceptions -from test.unit.mock_adapter import adapter_factory +from .mock_adapter import adapter_factory
update test_context.py to use a local import of mock_adapter This is consistent with the way that unit tests import utils.py and also fixes an import issue with our test environment.
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -35,7 +35,7 @@ requirements = [ # Protocol and data packages "pytmpdir >= 0.2.3", # A temporary directory, useful for extracting archives to "txhttputil >= 0.3.0", # Utility class for http requests - "vortexpy >= 1.2.8", # Data serialisation and transport layer, observable based + "vortexpy >= 1.2.9", # Data serialisation and transport layer, observable based # SOAP interface packages "SOAPpy-py3 >= 0.52.24", # See http://soappy.ooz.ie for tutorials
Updated to vortexpy <I> PEEK-<I>
py
diff --git a/src/flapjack/authentication.py b/src/flapjack/authentication.py index <HASH>..<HASH> 100644 --- a/src/flapjack/authentication.py +++ b/src/flapjack/authentication.py @@ -16,14 +16,17 @@ class Authentication(object): """Describes the base authentication protocol. """ - def __init__(self, require_active=True, **kwargs): + def __init__(self, **kwargs): """ Initializes any configuration properties specific for this authentication protocol. """ #! Whether to require users to have `is_active` flags in django set to #! `True`. - self.require_active = require_active + self.require_active = kwargs.get('require_active', True) + + #! Whether to allow anonymous users being returned from `authenticate`. + self.allow_anonymous = kwargs.get('allow_anonymous', True) def authenticate(self, request): """Gets the a user if they are authenticated; else None. @@ -50,6 +53,13 @@ class Header(six.with_metaclass(abc.ABCMeta, Authentication)): HTTP/1.1 header. """ + def __init__(self, **kwargs): + if 'allow_anonymous' not in kwargs: + # Unless explictly allowed; anon accesses are disallowed. + kwargs['allow_anonymous'] = False + + super(Header, self).__init__(**kwargs) + def authenticate(self, request): """Gets the a user if they are authenticated; else None.
Added `allow_anonymous` flag.
py
diff --git a/versions_tests/tests/test_models.py b/versions_tests/tests/test_models.py index <HASH>..<HASH> 100644 --- a/versions_tests/tests/test_models.py +++ b/versions_tests/tests/test_models.py @@ -2167,9 +2167,9 @@ class FilterOnForeignKeyRelationTest(TestCase): michael.save() baseball_hat.save() hat = WineDrinkerHat.objects.filter(wearer__name='michael') - self.assertEqual(hat, baseball_hat) + self.assertEqual(hat.pk, baseball_hat.pk) person = WineDrinker.objects.filter(hats__shape='baseball hat') - self.assertEqual(person, michael) + self.assertEqual(person.pk, michael.pk) class SpecifiedUUIDTest(TestCase):
Checking on primary key instead of object
py
diff --git a/src/canmatrix/formats/sym.py b/src/canmatrix/formats/sym.py index <HASH>..<HASH> 100644 --- a/src/canmatrix/formats/sym.py +++ b/src/canmatrix/formats/sym.py @@ -256,9 +256,13 @@ Title=\"{}\" if signal.multiplex == 'Multiplexor': mux_signal = signal - # ticker all possible mux-groups as i (0 - 2^ (number of bits of multiplexor)) first = 0 - for i in range(0, 1 << int(mux_signal.size)): + # find all used muxer-values + multiplexer_list = set([a.multiplex for a in frame.signals]) + # ticker all used muxer-values only + for i in multiplexer_list: + if type(i) != int: + continue found = 0 mux_out = "" # ticker all signals
better mux export handling in sym
py
diff --git a/seed/commands/release.py b/seed/commands/release.py index <HASH>..<HASH> 100644 --- a/seed/commands/release.py +++ b/seed/commands/release.py @@ -102,6 +102,14 @@ class ReleaseCommand(Command): print "Checks on setup.py failed. Messages were:\n%s" % "\n".join(warnings) sys.exit(1) + # Checking pypi login details are in place + print "Checking we have our PyPi login details in place" + if not os.path.exists(os.path.expanduser("~/.pypirc")): + print "Could not find your ~/.pypirc file. See http://seed.readthedocs.org/en/latest/#pypi-registration for help." + sys.exit(1) + else: + print "You have a ~/.pypirc file. Assuming the details in there are correct" + # Update the version number if options.dry_run:
Checking that a user's .pypirc file exists
py
diff --git a/script/upload-node-headers.py b/script/upload-node-headers.py index <HASH>..<HASH> 100755 --- a/script/upload-node-headers.py +++ b/script/upload-node-headers.py @@ -40,11 +40,15 @@ def main(): args = parse_args() node_headers_dir = os.path.join(DIST_DIR, 'node-{0}'.format(args.version)) iojs_headers_dir = os.path.join(DIST_DIR, 'iojs-{0}'.format(args.version)) + iojs2_headers_dir = os.path.join(DIST_DIR, + 'iojs-{0}-headers'.format(args.version)) copy_headers(node_headers_dir) create_header_tarball(node_headers_dir) copy_headers(iojs_headers_dir) create_header_tarball(iojs_headers_dir) + copy_headers(iojs2_headers_dir) + create_header_tarball(iojs2_headers_dir) # Upload node's headers to S3. bucket, access_key, secret_key = s3_config()
Upload headers with new filenames
py
diff --git a/glymur/test/fixtures.py b/glymur/test/fixtures.py index <HASH>..<HASH> 100644 --- a/glymur/test/fixtures.py +++ b/glymur/test/fixtures.py @@ -10,10 +10,12 @@ import numpy as np import glymur -# Need to know the openjpeg version. If openjpeg is not installed, we use -# '0.0.0' +# Need to know the version of the openjpeg software. If openjpeg is not +# installed, we use # '0.0.0' OPENJPEG_VERSION = '0.0.0' -if glymur.lib.openjpeg.OPENJPEG is not None: +if glymur.lib.openjp2.OPENJP2 is not None: + OPENJPEG_VERSION = glymur.lib.openjp2.version() +elif glymur.lib.openjpeg.OPENJPEG is not None: OPENJPEG_VERSION = glymur.lib.openjpeg.version() # Need to know of the libopenjp2 version is the official 2.0.0 release and NOT
OPENJPEG_VERSION is now the version of the software, not the library. For #<I>
py
diff --git a/test_deploy_stack.py b/test_deploy_stack.py index <HASH>..<HASH> 100644 --- a/test_deploy_stack.py +++ b/test_deploy_stack.py @@ -389,10 +389,24 @@ class DumpEnvLogsTestCase(TestCase): """)) with patch.object(client, 'get_status', autospec=True, return_value=status): - machine_addrs = get_machines_for_logs(client, '10.11.12.13') + machine_addrs = get_machines_for_logs(client, None) self.assertEqual( {'0': '10.11.12.13', '1': '10.11.12.14'}, machine_addrs) + def test_get_machines_for_log_with_boostrap_host(self): + client = EnvJujuClient( + SimpleEnvironment('cloud', {'type': 'ec2'}), '1.23.4', None) + status = Status.from_text(dedent("""\ + machines: + "0": + dns-name: 10.11.12.13 + """)) + with patch.object(client, 'get_status', autospec=True, + return_value=status): + machine_addrs = get_machines_for_logs(client, '10.11.111.222') + self.assertEqual( + {'0': '10.11.111.222'}, machine_addrs) + def test_retain_jenv(self): with temp_dir() as jenv_dir: jenv_path = os.path.join(jenv_dir, "temp.jenv")
Added test for get_machines_for_log with bootstrap_host
py
diff --git a/skl_groups/tests/test_divs_knn.py b/skl_groups/tests/test_divs_knn.py index <HASH>..<HASH> 100644 --- a/skl_groups/tests/test_divs_knn.py +++ b/skl_groups/tests/test_divs_knn.py @@ -20,7 +20,6 @@ if __name__ == '__main__': _this_dir = os.path.dirname(os.path.abspath(__file__)) sys.path.insert(0, os.path.dirname(os.path.dirname(_this_dir))) -from skl_groups.divergences import KNNDivergenceEstimator from skl_groups.features import Features @@ -33,12 +32,15 @@ except ImportError: except ImportError: have_flann = False -try: - import skl_groups_accel.knn_divs -except ImportError: - have_accel = False -else: - have_accel = True +if have_flann: + from skl_groups.divergences import KNNDivergenceEstimator + + try: + import skl_groups_accel.knn_divs + except ImportError: + have_accel = False + else: + have_accel = True ################################################################################
tests: only import KNNDivergenceEstimator if it's going to work
py
diff --git a/cuttlepool/cuttlepool.py b/cuttlepool/cuttlepool.py index <HASH>..<HASH> 100644 --- a/cuttlepool/cuttlepool.py +++ b/cuttlepool/cuttlepool.py @@ -169,7 +169,8 @@ class PoolConnection(object): """ Gets attributes of connection object. """ - return getattr(self._connection, attr) + if attr != 'close': + return getattr(self._connection, attr) def close(self): """
prevent return of close method attribute of connection
py
diff --git a/airflow/models.py b/airflow/models.py index <HASH>..<HASH> 100644 --- a/airflow/models.py +++ b/airflow/models.py @@ -467,14 +467,12 @@ class TaskInstance(Base): TI.state == State.SUCCESS, ).first() if not previous_ti: - logging.info('depends_on_past not fulfilled') return False # Applying wait_for_downstream previous_ti.task = self.task if task.wait_for_downstream and not \ previous_ti.are_dependents_done(session): - logging.info('wait_for_downstream not fulfilled') return False # Checking that all upstream dependencies have succeeded @@ -488,7 +486,6 @@ class TaskInstance(Base): ) count = ti[0][0] if count < len(task._upstream_list): - logging.info('Direct dependencies not fulfilled') return False if not main_session:
Reverting overlogging on dependencies, was printing more than intended
py
diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -56,7 +56,7 @@ def start_consul_instance(acl_master_token=None): ['http', 'rpc', 'serf_lan', 'serf_wan', 'server', 'dns'], get_free_ports(5) + [-1])) - config = {'ports': ports} + config = {'ports': ports, 'performance': {'raft_multiplier': 1}} if acl_master_token: config['acl_datacenter'] = 'dc1' config['acl_master_token'] = acl_master_token
Make test config use raft_multiplier. In an attempt to make the performance of the tests more reliable, use the performance.raft_multiplier key to return the test instance back to pre <I> performance levels. Locally this seems to improve the odds of the various async tests failing from about <I>% down to sub <I>%. see: <URL>
py
diff --git a/bids/layout/layout.py b/bids/layout/layout.py index <HASH>..<HASH> 100644 --- a/bids/layout/layout.py +++ b/bids/layout/layout.py @@ -744,7 +744,6 @@ class BIDSLayout(object): # Entity filtering if filters: - # query = query.join(BIDSFile.tags) regex = kwargs.get('regex_search', False) filters = self._sanitize_query_dtypes(filters)
Remove unnecessary code comment - Code that should have been deleted was only commented out for debugging purposes - The comment was not deleted prior to my last commit
py
diff --git a/peyotl/test/test_phylesystem_api.py b/peyotl/test/test_phylesystem_api.py index <HASH>..<HASH> 100644 --- a/peyotl/test/test_phylesystem_api.py +++ b/peyotl/test/test_phylesystem_api.py @@ -33,9 +33,15 @@ class TestPhylesystemAPI(unittest.TestCase): @unittest.skipIf(not HAS_LOCAL_PHYLESYSTEM_REPOS, 'only available if you are have a [phylesystem] section ' \ 'with "parent" variable in your peyotl config') - def testStudyList(self): + def testLocalStudyList(self): pa = PhylesystemAPI(self.domains, get_from='local') sl = pa.study_list + # local repo should have just a few studies + self.assertTrue(len(sl) < 10) + def testRemoteStudyList(self): + pa = PhylesystemAPI(self.domains, get_from='api') + sl = pa.study_list + # dev/production repos should have hundreds of studies self.assertTrue(len(sl) > 100) def testPushFailureState(self): pa = PhylesystemAPI(self.domains, get_from='api')
New and modified tests of local vs. remote APIs
py
diff --git a/tools/scenario-player/scenario_player/tasks/channels.py b/tools/scenario-player/scenario_player/tasks/channels.py index <HASH>..<HASH> 100644 --- a/tools/scenario-player/scenario_player/tasks/channels.py +++ b/tools/scenario-player/scenario_player/tasks/channels.py @@ -125,10 +125,10 @@ class AssertAllTask(ChannelActionTask): channel_field_values.remove(value) except ValueError: channel_field_values_str = ", ".join( - str(val) for val in channel_field_values_all, + str(val) for val in channel_field_values_all ) assert_field_values_str = ', '.join( - str(val) for val in self._config[assert_field], + str(val) for val in self._config[assert_field] ) raise ScenarioAssertionError( f'Expected value "{value}" for field "{field}" not found in any channel. '
Remove overeager trailing commas
py
diff --git a/flask_permissions/models.py b/flask_permissions/models.py index <HASH>..<HASH> 100644 --- a/flask_permissions/models.py +++ b/flask_permissions/models.py @@ -10,7 +10,7 @@ from .utils import is_sequence user_role_table = db.Table('fp_user_role', db.Column( - 'user_id', db.Integer, db.ForeignKey('fp_user.uid')), + 'user_id', db.Integer, db.ForeignKey('fp_user.id')), db.Column( 'role_id', db.Integer, db.ForeignKey('fp_role.id')) )
Changes uid to id
py
diff --git a/twython/twython.py b/twython/twython.py index <HASH>..<HASH> 100644 --- a/twython/twython.py +++ b/twython/twython.py @@ -258,10 +258,10 @@ class Twython(object): This will return None if the header is not present Most useful for the following header information: - x-ratelimit-limit - x-ratelimit-remaining - x-ratelimit-class - x-ratelimit-reset + x-rate-limit-limit + x-rate-limit-remaining + x-rate-limit-class + x-rate-limit-reset """ if self._last_call is None: raise TwythonError('This function must be called after an API call. It delivers header information.')
Small correction in comments Headers have changed and a - is now needed between rate and limit.
py
diff --git a/ddmrp/models/stock_move.py b/ddmrp/models/stock_move.py index <HASH>..<HASH> 100644 --- a/ddmrp/models/stock_move.py +++ b/ddmrp/models/stock_move.py @@ -24,10 +24,16 @@ class StockMove(models.Model): def write(self, vals): res = super(StockMove, self).write(vals) - if "state" in vals and self.env.company.ddmrp_auto_update_nfp: - # Stock moves state changes can be triggered by users without + if self and self.env.company.ddmrp_auto_update_nfp: + # Stock moves changes can be triggered by users without # access to write stock buffers, thus we do it with sudo. - self.sudo()._update_ddmrp_nfp() + if "state" in vals: + self.sudo()._update_ddmrp_nfp() + elif "location_id" in vals or "location_dest_id" in vals: + self.sudo().filtered( + lambda m: m.state + in ("confirmed", "partially_available", "assigned") + )._update_ddmrp_nfp() return res @api.model_create_multi
ddmrp: when ddmrp_auto_update_nfp option is enabled, buffers should also be updated when move location src/dest is changed. This fixes compatibility with modules like stock_dynamic_routing in OCA/wms that rewrites the location on pull/push rules.
py
diff --git a/tests/test_lp.py b/tests/test_lp.py index <HASH>..<HASH> 100644 --- a/tests/test_lp.py +++ b/tests/test_lp.py @@ -26,7 +26,6 @@ def test_lp(): badsolution = s.createSol() s.setSolVal(badsolution, x, 2.0) s.setSolVal(badsolution, y, 2.0) - s.addSol(badsolution, free = False) assert s.getSlack(c, badsolution) == 0.0 assert s.getSlack(c, badsolution, 'lhs') == 1.0 assert s.getSlack(c, badsolution, 'rhs') == 0.0
don't add solution in test_lp
py
diff --git a/selectable/forms/widgets.py b/selectable/forms/widgets.py index <HASH>..<HASH> 100644 --- a/selectable/forms/widgets.py +++ b/selectable/forms/widgets.py @@ -150,6 +150,8 @@ class AutoCompleteSelectMultipleWidget(forms.MultiWidget, SelectableMediaMixin): self.widgets[0].update_query_parameters(qs_dict) def decompress(self, value): + if value and isinstance(value, list) and len(value) == 2 and isinstance(value[1], list): + return value if value: if not hasattr(value, '__iter__'): value = [value] @@ -175,6 +177,8 @@ class AutoComboboxSelectMultipleWidget(forms.MultiWidget, SelectableMediaMixin): self.widgets[0].update_query_parameters(qs_dict) def decompress(self, value): + if value and isinstance(value, list) and len(value) == 2 and isinstance(value[1], list): + return value if value: if not hasattr(value, '__iter__'): value = [value]
Quick fix to decompress to recognize data coming from value_from_datadict and not decompress again. Better may be to standardize to a list format at a higher level, but that requires a bit more thought. This works for now.
py
diff --git a/gophish/models.py b/gophish/models.py index <HASH>..<HASH> 100644 --- a/gophish/models.py +++ b/gophish/models.py @@ -87,6 +87,7 @@ class Result(Model): for key, val in json.items(): if key in cls._valid_properties: setattr(result, key, val) + return result class TimelineEntry(object): @@ -101,6 +102,7 @@ class TimelineEntry(object): setattr(entry, key, details) elif key in cls._valid_properties: setattr(entry, key, val) + return entry class User(Model):
Fixed TimelineEntry and Result parsing
py
diff --git a/ceph_deploy/tests/unit/hosts/test_suse.py b/ceph_deploy/tests/unit/hosts/test_suse.py index <HASH>..<HASH> 100644 --- a/ceph_deploy/tests/unit/hosts/test_suse.py +++ b/ceph_deploy/tests/unit/hosts/test_suse.py @@ -8,7 +8,7 @@ class TestSuseInit(object): def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init(self.host) - assert init_type == "sysvinit" + assert init_type == "systemd" def test_choose_init_SLE_11(self): self.host.release = '11'
ceph_deploy.tests.unit.hosts.test_suse: Fix default init The default init system for SUSE should be systemd, as all future releases of SUSE are expected to be based upon systemd
py
diff --git a/vprof/profile_wrappers.py b/vprof/profile_wrappers.py index <HASH>..<HASH> 100644 --- a/vprof/profile_wrappers.py +++ b/vprof/profile_wrappers.py @@ -238,3 +238,12 @@ class MemoryProfile(BaseProfile): (i + 1, lineno, mem, e, fname) for i, (lineno, mem, e, fname) in enumerate(prof.events_list)] run_stats['totalEvents'] = len(prof.events_list) + + def run(self): + """Runs profile and returns collected stats. + + Runs memory stats collection in current process to avoid copy-on-write. + """ + memory_stats = {} + self.collect_stats(memory_stats) + return memory_stats
Run memory stats collection in current process to avoid copy-on-write.
py
diff --git a/pyensembl/gtf.py b/pyensembl/gtf.py index <HASH>..<HASH> 100644 --- a/pyensembl/gtf.py +++ b/pyensembl/gtf.py @@ -168,7 +168,8 @@ class GTF(object): "gene_name", "gene_biotype", "transcript_name", - "transcript_biotype" + "transcript_biotype", + "protein_id", }.intersection(column_names) }) return df
added protein_id to inferred columns for transcript in GTF missing that feature
py
diff --git a/lib/auxly/_modu.py b/lib/auxly/_modu.py index <HASH>..<HASH> 100644 --- a/lib/auxly/_modu.py +++ b/lib/auxly/_modu.py @@ -8,6 +8,9 @@ import os.path as op import subprocess import sys +if sys.version_info < (3, 0): + from __future__ import print_function + ##==============================================================# ## SECTION: Global Definitions # ##==============================================================#
Attempt to fix Py2 issue.
py
diff --git a/host/scan/scan.py b/host/scan/scan.py index <HASH>..<HASH> 100644 --- a/host/scan/scan.py +++ b/host/scan/scan.py @@ -90,8 +90,9 @@ class ScanBase(object): # self.readout.reset_sram_fifo() if not any(self.readout.print_readout_status()): - logging.error('Stopping scan: no sync') - return + raise NoSyncError('No data sync on any input channel') +# logging.error('Stopping scan: no sync') +# return self.stop_thread_event.clear() @@ -150,6 +151,9 @@ class ScanBase(object): def analyze(self, **kwargs): raise NotImplementedError('scan.analyze() not implemented') + +class NoSyncError(Exception): + pass from functools import wraps def set_event_when_keyboard_interrupt(_lambda):
ENH: throw exception when there is no sync
py
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ __author__ = 'Naor Livne' __author_email__ = '[email protected]' -__version__ = '2.0.0a' +__version__ = '2.0.0a1' from setuptools import setup, find_packages
changed version of package setup.py to <I>a1
py
diff --git a/denovonear/gene_plot/consequences.py b/denovonear/gene_plot/consequences.py index <HASH>..<HASH> 100644 --- a/denovonear/gene_plot/consequences.py +++ b/denovonear/gene_plot/consequences.py @@ -138,11 +138,11 @@ class Consequences(object): # the frameshift variants are ones where the difference in length # between the ref and alt alleles is not divisible by 3 if abs(len(ref) - len(alt)) % 3 != 0: - return "{}{}fs".format(initial_aa, codon["codon_number"]) + return "{}{}fs".format(codon["initial_aa"], codon["codon_number"]) elif len(ref) > len(alt): - return "{}{}del".format(initial_aa, codon["codon_number"]) + return "{}{}del".format(codon["initial_aa"], codon["codon_number"]) elif len(ref) < len(alt): - return "{}{}ins".format(initial_aa, codon["codon_number"]) + return "{}{}ins".format(codon["initial_aa"], codon["codon_number"]) def _get_splice_consequence(self, pos, ref, alt): """ figure out the HGVS-like code for splice site variants.
fixed typo to use entry from codon dictionary
py
diff --git a/autopep8.py b/autopep8.py index <HASH>..<HASH> 100755 --- a/autopep8.py +++ b/autopep8.py @@ -422,15 +422,19 @@ class FixPEP8(object): original_line = self.source[line_index] is_logical_fix = len(inspect.getargspec(fix).args) > 2 - if is_logical_fix and logical_support: - logical = self._get_logical(result, - logical_start, - logical_end) - if logical and set(range( - logical[0][0], - logical[1][0] + 1)).intersection( - completed_lines): - continue + if is_logical_fix: + if logical_support: + logical = self._get_logical(result, + logical_start, + logical_end) + if logical and set(range( + logical[0][0], + logical[1][0] + 1)).intersection( + completed_lines): + continue + else: + logical = None + modified_lines = fix(result, logical) else: modified_lines = fix(result)
Handle cases where there is no logical fix support
py
diff --git a/caniusepython3/test/test_dependencies.py b/caniusepython3/test/test_dependencies.py index <HASH>..<HASH> 100644 --- a/caniusepython3/test/test_dependencies.py +++ b/caniusepython3/test/test_dependencies.py @@ -73,6 +73,8 @@ class NetworkTests(unittest.TestCase): py3 = {'py3_project': ''} breaking_project = 'jekyll2nikola' got = dependencies.blocking_dependencies([breaking_project], py3) + # If you'd like to test that a message is logged we can use + # testfixtures.LogCapture or stdout redirects. def test_top_level_project_normalization(self): py3 = {'wsgi_intercept': ''}
Added a comment to test case.
py
diff --git a/tensor2tensor/insights/transformer_model.py b/tensor2tensor/insights/transformer_model.py index <HASH>..<HASH> 100644 --- a/tensor2tensor/insights/transformer_model.py +++ b/tensor2tensor/insights/transformer_model.py @@ -15,6 +15,10 @@ """A QueryProcessor using the Transformer framework.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + from collections import deque import glob @@ -24,6 +28,7 @@ import time import numpy as np +from six.moves import range from tensor2tensor.bin import t2t_trainer from tensor2tensor.data_generators import text_encoder from tensor2tensor.insights import graph
Automated refactoring to make code Python 3 compatible. PiperOrigin-RevId: <I>
py
diff --git a/pycoin/tx/Tx.py b/pycoin/tx/Tx.py index <HASH>..<HASH> 100644 --- a/pycoin/tx/Tx.py +++ b/pycoin/tx/Tx.py @@ -343,9 +343,7 @@ class Tx(object): if not self.txs_out: raise ValidationFailureError("txs_out = []") # Size limits - f = io.BytesIO() - self.stream(f) - size = len(f.getvalue()) + size = len(self.as_bin()) if size > self.MAX_BLOCK_SIZE: raise ValidationFailureError("size > MAX_BLOCK_SIZE") # Check for negative or overflow output values
Simplify Tx.check slightly.
py
diff --git a/openquake/server/urls.py b/openquake/server/urls.py index <HASH>..<HASH> 100644 --- a/openquake/server/urls.py +++ b/openquake/server/urls.py @@ -48,7 +48,7 @@ if settings.LOCKDOWN: admin.autodiscover() urlpatterns += [ - url(r'^admin/', include(admin.site.urls)), + url(r'^admin/', admin.site.urls), url(r'^accounts/login/$', login, {'template_name': 'account/login.html'}, name="login"), url(r'^accounts/logout/$', logout,
Fix error with Django admin urls
py
diff --git a/salt/runners/manage.py b/salt/runners/manage.py index <HASH>..<HASH> 100644 --- a/salt/runners/manage.py +++ b/salt/runners/manage.py @@ -678,7 +678,7 @@ def bootstrap(version='develop', .. versionchanged:: 2016.11.0 - .. deprecated:: 2016.11.0 + .. deprecated:: Oxygen script_args Any additional arguments that you want to pass to the script.
Update deprecated version info in manage.bootstrap func for root_user The warnings are for this argument to be removed in Oxygen. The documentation tag should match. Fixes #<I>
py
diff --git a/py/conftest.py b/py/conftest.py index <HASH>..<HASH> 100644 --- a/py/conftest.py +++ b/py/conftest.py @@ -1,6 +1,6 @@ dist_rsync_roots = ['.'] # XXX -pytest_plugins = 'pytest_doctest', 'pytest_pytester', 'pytest_restdoc', 'pytest_apigen' +pytest_plugins = 'pytest_doctest', 'pytest_pytester', 'pytest_restdoc' import py class PylibTestPlugin:
[svn r<I>] Oops, accidentally checked in a change, now rolling back (added 'apigen' to enabled plugins, which fails if the apigen package is not on the PYTHONPATH). --HG-- branch : trunk
py
diff --git a/launch_control/tests/sw_profile.py b/launch_control/tests/sw_profile.py index <HASH>..<HASH> 100644 --- a/launch_control/tests/sw_profile.py +++ b/launch_control/tests/sw_profile.py @@ -75,10 +75,8 @@ class PackagesWithDifferentVersions(SoftwarePackageTestCase): self.assertNotEqual(self.pkg1, self.pkg2) class PackageVersionComparison(SoftwarePackageTestCase): - # XXX: There is no point in testing odd versions agains each other - # as we didn't really write the comparator for that. - # Besides after googling for an hour I *still* cannot find - # any Debian-blessed document explaining the format. + # There is no point in testing odd versions agains each other as we + # didn't really write the comparator for that. def test_obvious(self): pkg1 = self.factory(version='1.0')
Rework the comment next to code testing debian version comparator
py
diff --git a/kerncraft/incore_model.py b/kerncraft/incore_model.py index <HASH>..<HASH> 100755 --- a/kerncraft/incore_model.py +++ b/kerncraft/incore_model.py @@ -625,6 +625,7 @@ def osaca_analyse_instrumented_assembly(instrumented_assembly_file, micro_archit sum([instr_form['latency_lcd'] for instr_form in lcd_dict[dep]['dependencies']])) result['output'] = frontend.full_analysis(kernel, kernel_graph, verbose=True) + result['analyzed kernel'] = kernel result['port cycles'] = OrderedDict(list(zip(osaca_machine_model['ports'], throughput_values))) result['throughput'] = max(throughput_values + [max_lcd]) result['uops'] = None # Not given by OSACA
including parsed kernel with osaca output
py
diff --git a/mwtab/mwrest.py b/mwtab/mwrest.py index <HASH>..<HASH> 100755 --- a/mwtab/mwrest.py +++ b/mwtab/mwrest.py @@ -479,7 +479,7 @@ class MWRESTFile(object): """ input_str = filehandle.read().decode("utf-8") self.text = input_str - self.text = re.sub(r"<.*?>", "", self.text) # included to remove remaining HTML tags + self.text = re.sub(r"</br>", "", self.text) # included to remove remaining HTML tags filehandle.close() def write(self, filehandle):
Refactors MWRESTFile class to specifically remove only '</br>' html tags which are left in files.
py
diff --git a/core.py b/core.py index <HASH>..<HASH> 100644 --- a/core.py +++ b/core.py @@ -12,8 +12,12 @@ __revision__ = "$Id$" import sys, os from types import * from distutils.errors import * + +# Mainly import these so setup scripts can "from distutils.core import" them. from distutils.dist import Distribution from distutils.cmd import Command +from distutils.extension import Extension + # This is a barebones help message generated displayed when the user # runs the setup script with no arguments at all. More useful help
Import the new Extension class, so setup scripts can "from distutils.core import" it.
py
diff --git a/djangocms_text_ckeditor/cms_plugins.py b/djangocms_text_ckeditor/cms_plugins.py index <HASH>..<HASH> 100644 --- a/djangocms_text_ckeditor/cms_plugins.py +++ b/djangocms_text_ckeditor/cms_plugins.py @@ -195,10 +195,9 @@ class TextPlugin(CMSPluginBase): # to avoid non-auth users from triggering validation mechanism. plugin._no_reorder = True - if plugin.parent_id: + if plugin.parent and plugin.parent.numchild > 0: CMSPlugin.objects.filter( pk=plugin.parent_id, - numchild__gt=0, ).update(numchild=F('numchild') - 1) plugin.delete(no_mp=True) # 204 -> request was successful but no response returned.
one more shot at db error
py
diff --git a/crashreporter/__init__.py b/crashreporter/__init__.py index <HASH>..<HASH> 100644 --- a/crashreporter/__init__.py +++ b/crashreporter/__init__.py @@ -1,3 +1,6 @@ __version__ = '1.02' -from crashreporter import CrashReporter \ No newline at end of file +try: + from crashreporter import CrashReporter +except ImportError: + pass \ No newline at end of file
wrap import of CrashReporter in __init__.py in a try/except
py