Dataset columns (name: type, range):

- commit: stringlengths, 40 to 40
- old_file: stringlengths, 4 to 106
- new_file: stringlengths, 4 to 106
- old_contents: stringlengths, 10 to 2.94k
- new_contents: stringlengths, 21 to 2.95k
- subject: stringlengths, 16 to 444
- message: stringlengths, 17 to 2.63k
- lang: stringclasses, 1 value
- license: stringclasses, 13 values
- repos: stringlengths, 7 to 43k
- ndiff: stringlengths, 52 to 3.31k
- instruction: stringlengths, 16 to 444
- content: stringlengths, 133 to 4.32k
- diff: stringlengths, 49 to 3.61k
daf9c8e39cd141194f8000cb3b8f4694e96401ed
pep438/core.py
pep438/core.py
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except: import xmlrpc.client as xmlprclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
Fix broken import in Python 3
Fix broken import in Python 3
Python
mit
treyhunner/pep438
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests + try: - import xmlrpclib + import xmlrpclib + except: + import xmlrpc.client as xmlprclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
Fix broken import in Python 3
## Code Before: """Core pep438 utility functions""" from __future__ import unicode_literals import requests import xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)] ## Instruction: Fix broken import in Python 3 ## Code After: """Core pep438 utility functions""" from __future__ import unicode_literals import requests try: import xmlrpclib except ImportError: import xmlrpc.client as xmlrpclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
"""Core pep438 utility functions""" from __future__ import unicode_literals import requests + try: - import xmlrpclib + import xmlrpclib ? ++++ + except: + import xmlrpc.client as xmlprclib import lxml.html from requirements import parse def valid_package(package_name): """Return bool if package_name is a valid package on PyPI""" response = requests.head('https://pypi.python.org/pypi/%s' % package_name) if response.status_code != 404: response.raise_for_status() return response.status_code != 404 def get_urls(package_name): """Return list of URLs on package's PyPI page that would be crawled""" response = requests.get('https://pypi.python.org/simple/%s' % package_name) response.raise_for_status() page = lxml.html.fromstring(response.content) crawled_urls = {link.get('href') for link in page.xpath('//a') if link.get('rel') in ("homepage", "download")} return crawled_urls def get_pypi_packages(fileobj): """Return all PyPI-hosted packages from file-like object""" return [p['name'] for p in parse(fileobj) if not p.get('uri')] def get_pypi_user_packages(user): client = xmlrpclib.ServerProxy('https://pypi.python.org/pypi') return [x[1] for x in client.user_packages(user)]
af4e705138887f950ebdb3cef06dfab77d89a1b9
indra/util/plot_formatting.py
indra/util/plot_formatting.py
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize)
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize) # 3-color swatch from colorbrewer2.org GREEN = "#66C2A5" ORANGE = "#FC8D62" PURPLE = "#8DA0CB"
Add default colors to plot formatting
Add default colors to plot formatting
Python
bsd-2-clause
sorgerlab/belpy,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize) + # 3-color swatch from colorbrewer2.org + GREEN = "#66C2A5" + ORANGE = "#FC8D62" + PURPLE = "#8DA0CB" +
Add default colors to plot formatting
## Code Before: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize) ## Instruction: Add default colors to plot formatting ## Code After: from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize) # 3-color swatch from colorbrewer2.org GREEN = "#66C2A5" ORANGE = "#FC8D62" PURPLE = "#8DA0CB"
from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import matplotlib fontsize=7 def set_fig_params(): matplotlib.rcParams['font.sans-serif'] = 'Arial' matplotlib.rcParams['text.usetex'] = True matplotlib.rcParams['text.latex.preamble'] = [ '\\usepackage{helvet}', '\\usepackage{sansmath}', '\\sansmath', '\\usepackage{underscore}',] def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'): ax.xaxis.set_ticks_position('bottom') ax.yaxis.set_ticks_position(yticks_position) ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize, pad=tick_padding, length=2, width=0.5) ax.xaxis.labelpad = label_padding ax.yaxis.labelpad = label_padding ax.xaxis.label.set_size(fontsize) ax.yaxis.label.set_size(fontsize) + + # 3-color swatch from colorbrewer2.org + GREEN = "#66C2A5" + ORANGE = "#FC8D62" + PURPLE = "#8DA0CB"
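The three hex constants are plain matplotlib-compatible color strings. A hypothetical usage sketch; the figure, data, legend call, and import path are assumptions, not part of the commit:

```python
# Hypothetical use of the new swatch constants with the existing helpers.
import matplotlib.pyplot as plt
from indra.util.plot_formatting import set_fig_params, format_axis, GREEN, ORANGE

set_fig_params()  # note: this enables usetex, which needs a LaTeX install
fig, ax = plt.subplots()
ax.plot([0, 1, 2], [1, 4, 9], color=GREEN, label='condition A')
ax.plot([0, 1, 2], [2, 3, 7], color=ORANGE, label='condition B')
format_axis(ax)
ax.legend(fontsize=7)
```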
d52a7b19f7b5596e88d7233dfea35a70b2645385
osmaxx-py/excerptconverter/converter_manager.py
osmaxx-py/excerptconverter/converter_manager.py
from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): export_options = {} for Converter in BaseExcerptConverter.available_converters: export_options[Converter.__name__] = Converter.converter_configuration() return export_options def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks )
from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): return {Converter.__name__: Converter.converter_configuration() for Converter in BaseExcerptConverter.available_converters} def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks )
Replace loop by dictionary comprehension
Refactoring: Replace loop by dictionary comprehension
Python
mit
geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend
from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): - export_options = {} + return {Converter.__name__: Converter.converter_configuration() - for Converter in BaseExcerptConverter.available_converters: + for Converter in BaseExcerptConverter.available_converters} - export_options[Converter.__name__] = Converter.converter_configuration() - return export_options def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks )
Replace loop by dictionary comprehension
## Code Before: from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): export_options = {} for Converter in BaseExcerptConverter.available_converters: export_options[Converter.__name__] = Converter.converter_configuration() return export_options def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks ) ## Instruction: Replace loop by dictionary comprehension ## Code After: from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): return {Converter.__name__: Converter.converter_configuration() for Converter in BaseExcerptConverter.available_converters} def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks )
from excerptconverter.baseexcerptconverter import BaseExcerptConverter class ConverterManager: @staticmethod def converter_configuration(): - export_options = {} + return {Converter.__name__: Converter.converter_configuration() - for Converter in BaseExcerptConverter.available_converters: ? ^ + for Converter in BaseExcerptConverter.available_converters} ? ++++++++ ^ - export_options[Converter.__name__] = Converter.converter_configuration() - return export_options def __init__(self, extraction_order, available_converters=BaseExcerptConverter.available_converters, run_as_celery_tasks=True): """" :param execution_configuration example: { 'gis': { 'formats': ['txt', 'file_gdb'], 'options': { 'coordinate_reference_system': 'wgs72', 'detail_level': 'verbatim' } }, 'routing': { ... } } """ self.extraction_order = extraction_order self.available_converters = available_converters self.run_as_celery_tasks = run_as_celery_tasks def execute_converters(self): for Converter in self.available_converters: if Converter.__name__ in self.extraction_order.extraction_configuration: Converter.execute( self.extraction_order, self.extraction_order.extraction_configuration[Converter.__name__], self.run_as_celery_tasks )
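The refactoring pattern in isolation, on a toy input: an accumulator loop and the equivalent dict comprehension build the same mapping, with the comprehension avoiding the mutable intermediate.

```python
converters = [int, float, str]  # stand-ins for the converter classes

# Before: explicit accumulator loop.
by_loop = {}
for conv in converters:
    by_loop[conv.__name__] = conv()

# After: single-expression dict comprehension.
by_comprehension = {conv.__name__: conv() for conv in converters}

assert by_loop == by_comprehension == {'int': 0, 'float': 0.0, 'str': ''}
```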
455783a2ef4c47a5bc9933d48e7d44dcf3c41dc0
tests/integration/grains/test_core.py
tests/integration/grains/test_core.py
''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg except: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text )
''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg except ImportError: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text )
Add ImportError to exception instead of bare "except"
Add ImportError to exception instead of bare "except" Fixes lint error on develop.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg - except: + except ImportError: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text )
Add ImportError to exception instead of bare "except"
## Code Before: ''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg except: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text ) ## Instruction: Add ImportError to exception instead of bare "except" ## Code After: ''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg except ImportError: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text )
''' Test the core grains ''' # Import python libs from __future__ import absolute_import # Import Salt Testing libs import tests.integration as integration from tests.support.unit import skipIf # Import salt libs import salt.utils if salt.utils.is_windows(): try: import salt.modules.reg - except: + except ImportError: pass class TestGrainsCore(integration.ModuleCase): ''' Test the core grains grains ''' @skipIf(not salt.utils.is_windows(), 'Only run on Windows') def test_win_cpu_model(self): ''' test grains['cpu_model'] ''' opts = self.minion_opts cpu_model_text = salt.modules.reg.read_value( "HKEY_LOCAL_MACHINE", "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", "ProcessorNameString").get('vdata') self.assertEqual( self.run_function('grains.items')['cpu_model'], cpu_model_text )
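The reason the lint rule rejects a bare `except`: it swallows every exception, including real bugs unrelated to the import. A runnable toy contrast showing that the narrowed form lets an unexpected error propagate:

```python
def guarded_badly():
    try:
        improt_typo  # a NameError (a genuine bug), not a missing module
    except:          # noqa -- the bare except hides the bug entirely
        pass

def guarded_well():
    try:
        improt_typo
    except ImportError:  # only the expected failure mode is silenced
        pass

guarded_badly()          # returns silently, masking the typo
try:
    guarded_well()
except NameError as err:
    print('bug surfaced as intended:', err)
```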
c7a67d4a69e1fe2ecb7f6c1a56202c6153e9766c
frigg/builds/filters.py
frigg/builds/filters.py
from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Project.objects.permitted_query(request.user)) class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Build.objects.permitted_query(request.user))
from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Project.objects.permitted_query(request.user)).distinct() class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
Fix multiple instance bug in api
Fix multiple instance bug in api
Python
mit
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): - return queryset.filter(Project.objects.permitted_query(request.user)) + return queryset.filter(Project.objects.permitted_query(request.user)).distinct() class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): - return queryset.filter(Build.objects.permitted_query(request.user)) + return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
Fix multiple instance bug in api
## Code Before: from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Project.objects.permitted_query(request.user)) class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Build.objects.permitted_query(request.user)) ## Instruction: Fix multiple instance bug in api ## Code After: from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Project.objects.permitted_query(request.user)).distinct() class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): return queryset.filter(Build.objects.permitted_query(request.user)).distinct()
from rest_framework import filters from frigg.builds.models import Build, Project class ProjectPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): - return queryset.filter(Project.objects.permitted_query(request.user)) + return queryset.filter(Project.objects.permitted_query(request.user)).distinct() ? +++++++++++ class BuildPermissionFilter(filters.BaseFilterBackend): def filter_queryset(self, request, queryset, view): - return queryset.filter(Build.objects.permitted_query(request.user)) + return queryset.filter(Build.objects.permitted_query(request.user)).distinct() ? +++++++++++
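The likely mechanics behind the bug (the field names here are assumptions, since `permitted_query` is not shown in the record): a Q filter that spans a to-many relation adds a SQL JOIN, and each matching related row duplicates the parent object in the result set until `.distinct()` collapses it. A sketch, not runnable outside a configured Django project:

```python
from django.db.models import Q
from frigg.builds.models import Build  # import path as used in the record

def permitted_builds(user):
    # Hypothetical shape of Build.objects.permitted_query: public projects OR
    # projects the user was granted access to. The second clause spans a
    # to-many relation, which adds a JOIN to the query.
    permitted = Q(project__private=False) | Q(project__approved_users=user)
    # Without distinct(), one Build row per matching JOIN row comes back;
    # distinct() guarantees exactly one row per Build.
    return Build.objects.filter(permitted).distinct()
```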
6de5612c0e92b4e7c7ca56b59d7fd5859aeb3409
apps/polls/urls.py
apps/polls/urls.py
from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ url(r'^$', views.index, name='index'), # ex: /polls/5 url(r'^(?P<poll_id>\d+)/$', views.detail, name='detail'), # ex: /polls/5/results/ url(r'^(?P<poll_id>\d+)/results/$', views.results, name='results'), # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), )
from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ url(r'^$', views.IndexView.as_view(), name='index'), # ex: /polls/5 url(r'^(?P<poll_id>\d+)/$', views.DetailView.as_view(), name='detail'), # ex: /polls/5/results/ url(r'^(?P<poll_id>\d+)/results/$', views.detail, name='results'), # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), )
Use generic views: Less code is better
Use generic views: Less code is better
Python
bsd-3-clause
hoale/teracy-tutorial,hoale/teracy-tutorial
from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ - url(r'^$', views.index, name='index'), + url(r'^$', views.IndexView.as_view(), name='index'), # ex: /polls/5 - url(r'^(?P<poll_id>\d+)/$', views.detail, name='detail'), + url(r'^(?P<poll_id>\d+)/$', views.DetailView.as_view(), name='detail'), # ex: /polls/5/results/ - url(r'^(?P<poll_id>\d+)/results/$', views.results, name='results'), + url(r'^(?P<poll_id>\d+)/results/$', views.detail, name='results'), # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), )
Use generic views: Less code is better
## Code Before: from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ url(r'^$', views.index, name='index'), # ex: /polls/5 url(r'^(?P<poll_id>\d+)/$', views.detail, name='detail'), # ex: /polls/5/results/ url(r'^(?P<poll_id>\d+)/results/$', views.results, name='results'), # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), ) ## Instruction: Use generic views: Less code is better ## Code After: from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ url(r'^$', views.IndexView.as_view(), name='index'), # ex: /polls/5 url(r'^(?P<poll_id>\d+)/$', views.DetailView.as_view(), name='detail'), # ex: /polls/5/results/ url(r'^(?P<poll_id>\d+)/results/$', views.detail, name='results'), # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), )
from django.conf.urls import patterns, url from apps.polls import views urlpatterns = patterns('', # ex: /polls/ - url(r'^$', views.index, name='index'), ? ^ + url(r'^$', views.IndexView.as_view(), name='index'), ? ^ ++++++++++++++ # ex: /polls/5 - url(r'^(?P<poll_id>\d+)/$', views.detail, name='detail'), ? ^ + url(r'^(?P<poll_id>\d+)/$', views.DetailView.as_view(), name='detail'), ? ^ ++++++++++++++ # ex: /polls/5/results/ - url(r'^(?P<poll_id>\d+)/results/$', views.results, name='results'), ? ^ ^^ -- + url(r'^(?P<poll_id>\d+)/results/$', views.detail, name='results'), ? ^ ^^^ # ex: /polls/5/vote/ url(r'^(?P<poll_id>\d+)/vote/$', views.vote, name='vote'), )
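The URLconf now references `views.IndexView` and `views.DetailView`, which are not shown in this record. A plausible sketch of what `apps/polls/views.py` would define, following the Django tutorial this mirrors (the template names, queryset, and model import are assumptions); note that because the URL still captures `poll_id`, `DetailView` needs `pk_url_kwarg` pointed at it:

```python
from django.views import generic
from apps.polls.models import Poll  # assumed model location

class IndexView(generic.ListView):
    template_name = 'polls/index.html'
    context_object_name = 'latest_poll_list'

    def get_queryset(self):
        return Poll.objects.order_by('-pub_date')[:5]

class DetailView(generic.DetailView):
    model = Poll
    template_name = 'polls/detail.html'
    pk_url_kwarg = 'poll_id'  # the URLconf captures poll_id, not the default pk
```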
2f041e6ed7d07ef8932350b68581e8dfeaef903f
dashboard/dashboard/pinpoint/handlers/job.py
dashboard/dashboard/pinpoint/handlers/job.py
import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) self.response.write(json.dumps({'data': job.AsDict()})) except: # pylint: disable=bare-except # There's no narrower exception we can catch. Catching # google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError # doesn't appear to work here. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 self.response.write(json.dumps({'error': 'Unknown job id.'})) return del job
import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) except Exception as e: # pylint: disable=broad-except # Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError # directly doesn't work. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 if e.__class__.__name__ == 'ProtocolBufferDecodeError': self.response.write(json.dumps({'error': 'Unknown job id.'})) return raise self.response.write(json.dumps({'data': job.AsDict()}))
Move Job handler out of exception block.
[pinpoint] Move Job handler out of exception block. The exception block is solely used for Job loading exceptions. Review-Url: https://codereview.chromium.org/2768293003
Python
bsd-3-clause
catapult-project/catapult,sahiljain/catapult,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult-csm
import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) - self.response.write(json.dumps({'data': job.AsDict()})) - except: # pylint: disable=bare-except + except Exception as e: # pylint: disable=broad-except - # There's no narrower exception we can catch. Catching - # google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError + # Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError - # doesn't appear to work here. + # directly doesn't work. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 + if e.__class__.__name__ == 'ProtocolBufferDecodeError': - self.response.write(json.dumps({'error': 'Unknown job id.'})) + self.response.write(json.dumps({'error': 'Unknown job id.'})) - return + return + raise - del job + self.response.write(json.dumps({'data': job.AsDict()}))
Move Job handler out of exception block.
## Code Before: import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) self.response.write(json.dumps({'data': job.AsDict()})) except: # pylint: disable=bare-except # There's no narrower exception we can catch. Catching # google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError # doesn't appear to work here. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 self.response.write(json.dumps({'error': 'Unknown job id.'})) return del job ## Instruction: Move Job handler out of exception block. ## Code After: import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) except Exception as e: # pylint: disable=broad-except # Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError # directly doesn't work. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 if e.__class__.__name__ == 'ProtocolBufferDecodeError': self.response.write(json.dumps({'error': 'Unknown job id.'})) return raise self.response.write(json.dumps({'data': job.AsDict()}))
import json import webapp2 from dashboard.pinpoint.models import job as job_module class JobHandler(webapp2.RequestHandler): def post(self): job_id = self.request.get('job_id') # Validate parameters. try: job = job_module.JobFromId(job_id) - self.response.write(json.dumps({'data': job.AsDict()})) - except: # pylint: disable=bare-except ? ^^ + except Exception as e: # pylint: disable=broad-except ? +++++++++++++++ ++ ^ - # There's no narrower exception we can catch. Catching - # google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError + # Catching google.net.proto.ProtocolBuffer.ProtocolBufferDecodeError ? +++++++++ - # doesn't appear to work here. + # directly doesn't work. # https://github.com/googlecloudplatform/datastore-ndb-python/issues/143 + if e.__class__.__name__ == 'ProtocolBufferDecodeError': - self.response.write(json.dumps({'error': 'Unknown job id.'})) + self.response.write(json.dumps({'error': 'Unknown job id.'})) ? ++ - return + return ? ++ + raise - del job + self.response.write(json.dumps({'data': job.AsDict()}))
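The shape of the refactoring, reduced to a stub: guard only the call that can fail, filter the exception by class name (since the real class cannot be imported directly, per the linked issue), and keep the success path outside the try block. `load` and the response dicts are stand-ins, not the real handler:

```python
def load(job_id):
    raise ValueError('unrelated bug')  # stand-in for job_module.JobFromId

def handle(job_id):
    try:
        job = load(job_id)                # only the lookup is guarded
    except Exception as e:                # broad on purpose, then re-filtered
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
            return {'error': 'Unknown job id.'}
        raise                             # anything else still propagates
    return {'data': job}                  # success path no longer masked

# handle('123') would re-raise the ValueError instead of reporting a bad id.
```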
eed78d3a671aee0fcc0760f15087085f2918da6c
travis_ci/settings.py
travis_ci/settings.py
"""GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application'
"""GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } ALLOWED_HOSTS = ['localhost'] INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application'
Add "localhost" in the allowed hosts for testing purposes
Add "localhost" in the allowed hosts for testing purposes
Python
mit
ExCiteS/geokey-epicollect,ExCiteS/geokey-epicollect
"""GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } + ALLOWED_HOSTS = ['localhost'] + INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application'
Add "localhost" in the allowed hosts for testing purposes
## Code Before: """GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application' ## Instruction: Add "localhost" in the allowed hosts for testing purposes ## Code After: """GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } ALLOWED_HOSTS = ['localhost'] INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application'
"""GeoKey settings.""" from geokey.core.settings.dev import * DEFAULT_FROM_EMAIL = '[email protected]' ACCOUNT_EMAIL_VERIFICATION = 'optional' SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx' DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': 'geokey', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', 'PORT': '', } } + ALLOWED_HOSTS = ['localhost'] + INSTALLED_APPS += ( 'geokey_epicollect', ) STATIC_URL = '/static/' MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets')) MEDIA_URL = '/assets/' WSGI_APPLICATION = 'wsgi.application'
311dfdc28bda253e20d09c84a3ba739f5e9be7ef
tests/utils_test.py
tests/utils_test.py
import datetime import json import unittest from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" class DatetimeJSONEncoderTest(unittest.TestCase): def test_datetime_encoder_format(self): dictionary = {"now": DATE} expected_json_string = json.dumps({"now": DATE_STRING}) json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) self.assertEqual(expected_json_string, json_string) def test_datetime_encoder_typeerror(self): undumpable_dictionary = {"set": set()} # Ensure we let the parent raise TypeError with self.assertRaises(TypeError): json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
import datetime import json import pytest from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" def test_datetime_encoder_format(): dictionary = {"now": DATE} expected_json_string = json.dumps({"now": DATE_STRING}) json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) assert json_string == expected_json_string def test_datetime_encoder_typeerror(): undumpable_dictionary = {"set": set()} # Ensure we let the parent raise TypeError with pytest.raises(TypeError): json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
Convert parser tests to pytest
Convert parser tests to pytest
Python
mit
samueldg/clippings
import datetime import json + - import unittest + import pytest from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" - class DatetimeJSONEncoderTest(unittest.TestCase): + def test_datetime_encoder_format(): + dictionary = {"now": DATE} + expected_json_string = json.dumps({"now": DATE_STRING}) + json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) + assert json_string == expected_json_string - def test_datetime_encoder_format(self): - dictionary = {"now": DATE} - expected_json_string = json.dumps({"now": DATE_STRING}) - json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) - self.assertEqual(expected_json_string, json_string) - def test_datetime_encoder_typeerror(self): + def test_datetime_encoder_typeerror(): - undumpable_dictionary = {"set": set()} + undumpable_dictionary = {"set": set()} - # Ensure we let the parent raise TypeError + # Ensure we let the parent raise TypeError - with self.assertRaises(TypeError): + with pytest.raises(TypeError): - json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder) + json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
Convert parser tests to pytest
## Code Before: import datetime import json import unittest from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" class DatetimeJSONEncoderTest(unittest.TestCase): def test_datetime_encoder_format(self): dictionary = {"now": DATE} expected_json_string = json.dumps({"now": DATE_STRING}) json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) self.assertEqual(expected_json_string, json_string) def test_datetime_encoder_typeerror(self): undumpable_dictionary = {"set": set()} # Ensure we let the parent raise TypeError with self.assertRaises(TypeError): json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder) ## Instruction: Convert parser tests to pytest ## Code After: import datetime import json import pytest from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" def test_datetime_encoder_format(): dictionary = {"now": DATE} expected_json_string = json.dumps({"now": DATE_STRING}) json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) assert json_string == expected_json_string def test_datetime_encoder_typeerror(): undumpable_dictionary = {"set": set()} # Ensure we let the parent raise TypeError with pytest.raises(TypeError): json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
import datetime import json + - import unittest ? ^^^^ + import pytest ? ^^ from clippings.utils import DatetimeJSONEncoder DATE = datetime.datetime(2016, 1, 2, 3, 4, 5) DATE_STRING = "2016-01-02T03:04:05" - class DatetimeJSONEncoderTest(unittest.TestCase): + def test_datetime_encoder_format(): + dictionary = {"now": DATE} + expected_json_string = json.dumps({"now": DATE_STRING}) + json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) + assert json_string == expected_json_string - def test_datetime_encoder_format(self): - dictionary = {"now": DATE} - expected_json_string = json.dumps({"now": DATE_STRING}) - json_string = json.dumps(dictionary, cls=DatetimeJSONEncoder) - self.assertEqual(expected_json_string, json_string) - def test_datetime_encoder_typeerror(self): ? ---- ---- + def test_datetime_encoder_typeerror(): - undumpable_dictionary = {"set": set()} ? ---- + undumpable_dictionary = {"set": set()} - # Ensure we let the parent raise TypeError ? ---- + # Ensure we let the parent raise TypeError - with self.assertRaises(TypeError): ? ---- ^^^ ---- -- + with pytest.raises(TypeError): ? ++++ ^ - json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder) ? ---- + json_string = json.dumps(undumpable_dictionary, cls=DatetimeJSONEncoder)
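One more piece of the same migration that often comes up: `pytest.raises` can bind the raised exception for follow-up assertions, where the `unittest` style needed a context manager variable as well. A small runnable extension of the converted test:

```python
import json
import pytest

def test_typeerror_message():
    with pytest.raises(TypeError) as excinfo:
        json.dumps({"set": set()})
    # Both py2 and py3 json include this phrase in the error text.
    assert 'not JSON serializable' in str(excinfo.value)
```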
ce28b359122475f544b9ae3bc9e05a5bc02377e4
conda_kapsel/internal/py2_compat.py
conda_kapsel/internal/py2_compat.py
from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): assert isinstance(key, basestring) # noqa assert isinstance(key, str) if isinstance(value, unicode): # noqa environ_copy[key] = value.encode() assert isinstance(environ_copy[key], str) elif not isinstance(value, str): raise TypeError("Environment contains non-unicode non-str value %r" % value) else: environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ
from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): if isinstance(key, unicode): # noqa key = key.encode() if isinstance(value, unicode): # noqa value = value.encode() assert isinstance(key, str) assert isinstance(value, str) environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ
Fix unicode keys in addition to values for windows/py2 environment
Fix unicode keys in addition to values for windows/py2 environment
Python
bsd-3-clause
conda/kapsel,conda/kapsel
from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): - assert isinstance(key, basestring) # noqa + if isinstance(key, unicode): # noqa + key = key.encode() + if isinstance(value, unicode): # noqa + value = value.encode() assert isinstance(key, str) - if isinstance(value, unicode): # noqa - environ_copy[key] = value.encode() - assert isinstance(environ_copy[key], str) - elif not isinstance(value, str): + assert isinstance(value, str) - raise TypeError("Environment contains non-unicode non-str value %r" % value) - else: - environ_copy[key] = value + environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ
Fix unicode keys in addition to values for windows/py2 environment
## Code Before: from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): assert isinstance(key, basestring) # noqa assert isinstance(key, str) if isinstance(value, unicode): # noqa environ_copy[key] = value.encode() assert isinstance(environ_copy[key], str) elif not isinstance(value, str): raise TypeError("Environment contains non-unicode non-str value %r" % value) else: environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ ## Instruction: Fix unicode keys in addition to values for windows/py2 environment ## Code After: from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): if isinstance(key, unicode): # noqa key = key.encode() if isinstance(value, unicode): # noqa value = value.encode() assert isinstance(key, str) assert isinstance(value, str) environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ
from __future__ import absolute_import, print_function import platform import sys _PY2 = sys.version_info[0] == 2 def is_string(s): if _PY2: # pragma: no cover (py2/py3) return isinstance(s, basestring) # pragma: no cover (py2/py3) # noqa else: # pragma: no cover (py2/py3) return isinstance(s, str) # pragma: no cover (py2/py3) def env_without_unicode(environ): # On Windows / Python 2.7, Popen explodes if given unicode strings in the environment. if _PY2 and platform.system() == 'Windows': # pragma: no cover (py2/py3) environ_copy = dict() for key, value in environ.items(): - assert isinstance(key, basestring) # noqa ? ^^^^^^ ^^^ ------ + if isinstance(key, unicode): # noqa ? ^^ ^^^^^^ + + key = key.encode() + if isinstance(value, unicode): # noqa + value = value.encode() assert isinstance(key, str) - if isinstance(value, unicode): # noqa - environ_copy[key] = value.encode() - assert isinstance(environ_copy[key], str) - elif not isinstance(value, str): ? ^^^^^^ - + assert isinstance(value, str) ? +++ ^ - raise TypeError("Environment contains non-unicode non-str value %r" % value) - else: - environ_copy[key] = value ? ---- + environ_copy[key] = value return environ_copy else: # pragma: no cover (py2/py3) return environ
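A pure-Python illustration of what the fix changes. On Python 3 there is no separate `unicode` type, so the text type is parameterized here; this mirrors the py2/Windows branch conceptually rather than reproducing it:

```python
def env_without_unicode_sketch(environ, text_type=str):
    out = {}
    for key, value in environ.items():
        if isinstance(key, text_type):    # NEW in the fix: keys are converted too
            key = key.encode()
        if isinstance(value, text_type):
            value = value.encode()
        out[key] = value
    return out

print(env_without_unicode_sketch({u'PATH': u'C:\\bin'}))
# {b'PATH': b'C:\\bin'} -- previously only values were encoded, so a unicode
# key would still have reached Popen and crashed on Windows under Python 2.
```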
c79cedf826a3b6ee89e6186954185ef3217dd901
tomviz/python/InvertData.py
tomviz/python/InvertData.py
import tomviz.operators NUMBER_OF_CHUNKS = 10 class InvertOperator(tomviz.operators.CancelableOperator): def transform_scalars(self, dataset): from tomviz import utils import numpy as np self.progress.maximum = NUMBER_OF_CHUNKS scalars = utils.get_scalars(dataset) if scalars is None: raise RuntimeError("No scalars found!") result = np.float32(scalars) max = np.amax(scalars) step = 0 for chunk in np.array_split(result, NUMBER_OF_CHUNKS): if self.canceled: return chunk[:] = max - chunk step += 1 self.progress.value = step utils.set_scalars(dataset, result)
import tomviz.operators NUMBER_OF_CHUNKS = 10 class InvertOperator(tomviz.operators.CancelableOperator): def transform_scalars(self, dataset): from tomviz import utils import numpy as np self.progress.maximum = NUMBER_OF_CHUNKS scalars = utils.get_scalars(dataset) if scalars is None: raise RuntimeError("No scalars found!") result = np.float32(scalars) min = np.amin(scalars) max = np.amax(scalars) step = 0 for chunk in np.array_split(result, NUMBER_OF_CHUNKS): if self.canceled: return chunk[:] = max - chunk + min step += 1 self.progress.value = step utils.set_scalars(dataset, result)
Add the minimum scalar value to the result of the InvertOperator
Add the minimum scalar value to the result of the InvertOperator Without it, all results would be shifted so the minimum was 0.
Python
bsd-3-clause
OpenChemistry/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,mathturtle/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,mathturtle/tomviz
import tomviz.operators NUMBER_OF_CHUNKS = 10 class InvertOperator(tomviz.operators.CancelableOperator): def transform_scalars(self, dataset): from tomviz import utils import numpy as np self.progress.maximum = NUMBER_OF_CHUNKS scalars = utils.get_scalars(dataset) if scalars is None: raise RuntimeError("No scalars found!") result = np.float32(scalars) + min = np.amin(scalars) max = np.amax(scalars) step = 0 for chunk in np.array_split(result, NUMBER_OF_CHUNKS): if self.canceled: return - chunk[:] = max - chunk + chunk[:] = max - chunk + min step += 1 self.progress.value = step utils.set_scalars(dataset, result)
Add the minimum scalar value to the result of the InvertOperator
## Code Before:
import tomviz.operators

NUMBER_OF_CHUNKS = 10


class InvertOperator(tomviz.operators.CancelableOperator):

    def transform_scalars(self, dataset):
        from tomviz import utils
        import numpy as np

        self.progress.maximum = NUMBER_OF_CHUNKS

        scalars = utils.get_scalars(dataset)
        if scalars is None:
            raise RuntimeError("No scalars found!")

        result = np.float32(scalars)
        max = np.amax(scalars)
        step = 0
        for chunk in np.array_split(result, NUMBER_OF_CHUNKS):
            if self.canceled:
                return
            chunk[:] = max - chunk
            step += 1
            self.progress.value = step

        utils.set_scalars(dataset, result)

## Instruction:
Add the minimum scalar value to the result of the InvertOperator

## Code After:
import tomviz.operators

NUMBER_OF_CHUNKS = 10


class InvertOperator(tomviz.operators.CancelableOperator):

    def transform_scalars(self, dataset):
        from tomviz import utils
        import numpy as np

        self.progress.maximum = NUMBER_OF_CHUNKS

        scalars = utils.get_scalars(dataset)
        if scalars is None:
            raise RuntimeError("No scalars found!")

        result = np.float32(scalars)
        min = np.amin(scalars)
        max = np.amax(scalars)
        step = 0
        for chunk in np.array_split(result, NUMBER_OF_CHUNKS):
            if self.canceled:
                return
            chunk[:] = max - chunk + min
            step += 1
            self.progress.value = step

        utils.set_scalars(dataset, result)
import tomviz.operators

NUMBER_OF_CHUNKS = 10


class InvertOperator(tomviz.operators.CancelableOperator):

    def transform_scalars(self, dataset):
        from tomviz import utils
        import numpy as np

        self.progress.maximum = NUMBER_OF_CHUNKS

        scalars = utils.get_scalars(dataset)
        if scalars is None:
            raise RuntimeError("No scalars found!")

        result = np.float32(scalars)
+         min = np.amin(scalars)
        max = np.amax(scalars)
        step = 0
        for chunk in np.array_split(result, NUMBER_OF_CHUNKS):
            if self.canceled:
                return
-             chunk[:] = max - chunk
+             chunk[:] = max - chunk + min
?                                   ++++++
            step += 1
            self.progress.value = step

        utils.set_scalars(dataset, result)
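A quick numeric illustration of the fix above (not part of the commit; the array values are invented): plain `max - x` shifts the inverted data so its minimum becomes 0, while `max - x + min` keeps the output inside the original [min, max] range.

import numpy as np

x = np.array([10., 12., 20.], dtype=np.float32)
lo, hi = float(np.amin(x)), float(np.amax(x))

print(hi - x)       # [10.  8.  0.]  -- range shifted to start at 0
print(hi - x + lo)  # [20. 18. 10.]  -- stays within [10, 20], as the commit intends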
3cee41ff8a7af405fe3a6bfda214e4fe1a6d3c0f
oneflow/settings/snippets/db_production.py
oneflow/settings/snippets/db_production.py
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0
REDIS_TEST_DB = 9

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2
Add the test REDIS database.
Add the test REDIS database.
Python
agpl-3.0
1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0
+ REDIS_TEST_DB = 9

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2
Add the test REDIS database.
## Code Before:
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2

## Instruction:
Add the test REDIS database.

## Code After:
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0
REDIS_TEST_DB = 9

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2
DATABASES['default'] = dj_database_url.config(
    default='postgres://oneflow:8jxcWaAfPJT3mV@{0}'
    '/oneflow'.format(MAIN_SERVER))

mongoengine.connect('oneflow', host=MAIN_SERVER)

REDIS_DB = 0
+ REDIS_TEST_DB = 9

CONSTANCE_REDIS_CONNECTION = 'redis://{0}:6379/{1}'.format(
    MAIN_SERVER, REDIS_DB)

SESSION_REDIS_HOST = MAIN_SERVER
SESSION_REDIS_DB = 2
61accbe3fa6ebdeed3bbf48573d5ac5412d0f1db
app/status/views.py
app/status/views.py
import os
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
from ..models import Framework
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
            frameworks={f.slug: f.status for f in Framework.query.all()},
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500
Add framework status to API /_status
Add framework status to API /_status To figure out current framework statuses for the given environment you either need access to the API token or you'd have to look through a number of frontend pages to infer the status from. Framework status is a part of almost every request to the API, so it should always be available for a working API instance and it makes sense to add it to the /_status page. Adding it to the /_status page creates an easier way to get the list of all framework statuses.
Python
mit
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
- import os
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
+ from ..models import Framework
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
+             frameworks={f.slug: f.status for f in Framework.query.all()},
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500
Add framework status to API /_status
## Code Before:
import os
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500

## Instruction:
Add framework status to API /_status

## Code After:
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
from ..models import Framework
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
            frameworks={f.slug: f.status for f in Framework.query.all()},
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500
- import os
from flask import jsonify, current_app, request
from sqlalchemy.exc import SQLAlchemyError

from . import status
from . import utils
+ from ..models import Framework
from dmutils.status import get_flags


@status.route('/_status')
def status_no_db():
    if 'ignore-dependencies' in request.args:
        return jsonify(
            status="ok",
        ), 200

    version = current_app.config['VERSION']

    try:
        return jsonify(
            status="ok",
+             frameworks={f.slug: f.status for f in Framework.query.all()},
            version=version,
            db_version=utils.get_db_version(),
            flags=get_flags(current_app)
        )
    except SQLAlchemyError:
        current_app.logger.exception('Error connecting to database')
        return jsonify(
            status="error",
            version=version,
            message="Error connecting to database",
            flags=get_flags(current_app)
        ), 500
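For reference, a sketch of the JSON the patched endpoint would return (all slugs, versions and statuses below are invented; the real values come from the Framework table and the app config):

expected_payload = {
    "status": "ok",
    "version": "1.0",                # hypothetical VERSION config value
    "db_version": "abc123",          # hypothetical database revision
    "frameworks": {                  # built by {f.slug: f.status, ...}
        "g-cloud-7": "live",
        "digital-outcomes": "open",
    },
    "flags": {},
}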
fc9fdd2115b46c71c36ba7d86f14395ac4cf1e3e
genome_designer/scripts/generate_coverage_data.py
genome_designer/scripts/generate_coverage_data.py
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
        subprocess.check_call([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
        ], stdout=fh)
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
        p_mpileup = subprocess.Popen([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
        ], stdout=subprocess.PIPE)

        subprocess.check_call([
            'cut',
            '-f',
            '-4'
        ], stdin=p_mpileup.stdout, stdout=fh)
Update coverage script to only output the first 4 cols which shows coverage.
Update coverage script to only output the first 4 cols which shows coverage.
Python
mit
woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone,churchlab/millstone,churchlab/millstone,churchlab/millstone
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
-         subprocess.check_call([
+         p_mpileup = subprocess.Popen([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
-         ], stdout=fh)
+         ], stdout=subprocess.PIPE)
+         subprocess.check_call([
+             'cut',
+             '-f',
+             '-4'
+         ], stdin=p_mpileup.stdout, stdout=fh)
+
Update coverage script to only output the first 4 cols which shows coverage.
## Code Before:
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
        subprocess.check_call([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
        ], stdout=fh)

## Instruction:
Update coverage script to only output the first 4 cols which shows coverage.

## Code After:
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
        p_mpileup = subprocess.Popen([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
        ], stdout=subprocess.PIPE)

        subprocess.check_call([
            'cut',
            '-f',
            '-4'
        ], stdin=p_mpileup.stdout, stdout=fh)
import os
import subprocess

from django.conf import settings

from main.models import get_dataset_with_type
from main.models import AlignmentGroup
from main.models import Dataset

from utils import generate_safe_filename_prefix_from_label


def analyze_coverage(sample_alignment, output_dir):
    ref_genome_fasta_location = get_dataset_with_type(
        sample_alignment.alignment_group.reference_genome,
        Dataset.TYPE.REFERENCE_GENOME_FASTA).get_absolute_location()

    input_bam_file = sample_alignment.dataset_set.get(
        type=Dataset.TYPE.BWA_ALIGN).get_absolute_location()

    output_filename = generate_safe_filename_prefix_from_label(
        sample_alignment.experiment_sample.label + '_' +
        sample_alignment.uid) + '.coverage'
    output_path = os.path.join(output_dir, output_filename)

    with open(output_path, 'w') as fh:
-         subprocess.check_call([
+         p_mpileup = subprocess.Popen([
            '%s/samtools/samtools' % settings.TOOLS_DIR,
            'mpileup',
            '-f', ref_genome_fasta_location,
            input_bam_file
-         ], stdout=fh)
+         ], stdout=subprocess.PIPE)
+
+         subprocess.check_call([
+             'cut',
+             '-f',
+             '-4'
+         ], stdin=p_mpileup.stdout, stdout=fh)
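A standalone sketch of the same pipeline without the Django models (all paths are placeholders): samtools mpileup writes chromosome, 1-based position, reference base and read depth as its first four tab-separated columns, which is why `cut -f -4` is enough for a per-base coverage report.

import subprocess

with open('sample.coverage', 'w') as fh:
    p_mpileup = subprocess.Popen(
        ['samtools', 'mpileup', '-f', 'ref.fasta', 'sample.bam'],
        stdout=subprocess.PIPE)
    subprocess.check_call(['cut', '-f', '-4'],
                          stdin=p_mpileup.stdout, stdout=fh)
    p_mpileup.stdout.close()  # let samtools see SIGPIPE if cut exits first
    p_mpileup.wait()          # avoid leaving a zombie process behind

Note that the committed version never waits on p_mpileup; that is harmless for a short-lived task but worth adding in long-running services.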
78ef59e29e2bed99d07261ff947f16be69e0e6b5
tests/fake_dbus_tools/swm.py
tests/fake_dbus_tools/swm.py
import gtk
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
    swlm_service = SLMService()
    while True:
        gtk.main_iteration()
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop
import gobject


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
    mainloop = gobject.MainLoop()
    swlm_service = SLMService()
    while True:
        mainloop.run()
Replace gtk mainloop with glib mainloop
Replace gtk mainloop with glib mainloop This is because Travis CI runs headless and importing gtk fails
Python
mpl-2.0
advancedtelematic/aktualizr,advancedtelematic/aktualizr,advancedtelematic/sota_client_cpp,advancedtelematic/aktualizr,advancedtelematic/aktualizr,advancedtelematic/sota_client_cpp
- import gtk
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop
+ import gobject


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
+     mainloop = gobject.MainLoop()
    swlm_service = SLMService()
    while True:
-         gtk.main_iteration()
+         mainloop.run()
+
Replace gtk mainloop with glib mainloop
## Code Before:
import gtk
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
    swlm_service = SLMService()
    while True:
        gtk.main_iteration()

## Instruction:
Replace gtk mainloop with glib mainloop

## Code After:
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop
import gobject


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
    mainloop = gobject.MainLoop()
    swlm_service = SLMService()
    while True:
        mainloop.run()
- import gtk
import dbus.service
import sys
from dbus.mainloop.glib import DBusGMainLoop
+ import gobject


class SLMService(dbus.service.Object):
    def __init__(self):
        bus_name = dbus.service.BusName('org.genivi.SoftwareLoadingManager',
                                        bus=dbus.SessionBus())
        dbus.service.Object.__init__(self, bus_name,
                                     "/org/genivi/SoftwareLoadingManager")

    @dbus.service.method("org.genivi.SoftwareLoadingManager",
                         async_callbacks=('send_reply', 'send_error'))
    def downloadComplete(self, update_image, signature,
                         send_reply, send_error):
        print('SoftwareLoadingManager.SLMService.downloadComplete(%s, %s): Called.',
              update_image, signature)
        send_reply(True)
        fl = open("/tmp/dbustestswm.txt", 'w')
        fl.write("DownloadComplete")
        fl.close()


if __name__ == "__main__":
    DBusGMainLoop(set_as_default=True)
+     mainloop = gobject.MainLoop()
    swlm_service = SLMService()
    while True:
-         gtk.main_iteration()
+         mainloop.run()
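A minimal standalone version of the pattern the commit switches to (this assumes the PyGTK-era gobject module; on newer GObject-introspection stacks the equivalent would be `from gi.repository import GLib`): a GLib main loop needs no display, so it runs on headless CI where `import gtk` raises.

import gobject
from dbus.mainloop.glib import DBusGMainLoop

DBusGMainLoop(set_as_default=True)
loop = gobject.MainLoop()
loop.run()  # blocks, dispatching D-Bus events, until loop.quit() is called

Since mainloop.run() already blocks, the `while True:` wrapper in the new code only matters if something calls quit() and the service should keep serving.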
fdcfe40cd388a6f53db22af44fcfa10d7901f490
reviewboard/attachments/admin.py
reviewboard/attachments/admin.py
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
    raw_id_fields = ('file', 'reply_to')

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
    raw_id_fields = ('file_attachment', 'reply_to')

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)
Fix another broken file attachment field.
Fix another broken file attachment field. There was one more admin field for file attachments that was using an old name. Now fixed.
Python
mit
chazy/reviewboard,custode/reviewboard,sgallagher/reviewboard,beol/reviewboard,Khan/reviewboard,chipx86/reviewboard,1tush/reviewboard,sgallagher/reviewboard,brennie/reviewboard,davidt/reviewboard,Khan/reviewboard,atagar/ReviewBoard,Khan/reviewboard,reviewboard/reviewboard,chazy/reviewboard,1tush/reviewboard,custode/reviewboard,beol/reviewboard,chazy/reviewboard,custode/reviewboard,atagar/ReviewBoard,bkochendorfer/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,davidt/reviewboard,1tush/reviewboard,KnowNo/reviewboard,reviewboard/reviewboard,beol/reviewboard,sgallagher/reviewboard,chazy/reviewboard,Khan/reviewboard,1tush/reviewboard,brennie/reviewboard,chazy/reviewboard,1tush/reviewboard,KnowNo/reviewboard,brennie/reviewboard,atagar/ReviewBoard,reviewboard/reviewboard,davidt/reviewboard,Khan/reviewboard,sgallagher/reviewboard,brennie/reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,davidt/reviewboard,chazy/reviewboard,KnowNo/reviewboard,1tush/reviewboard,reviewboard/reviewboard,bkochendorfer/reviewboard,custode/reviewboard,atagar/ReviewBoard,Khan/reviewboard,Khan/reviewboard,KnowNo/reviewboard,chazy/reviewboard,atagar/ReviewBoard,beol/reviewboard,1tush/reviewboard,chipx86/reviewboard,chipx86/reviewboard,chazy/reviewboard,Khan/reviewboard,Khan/reviewboard,chipx86/reviewboard,chazy/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
-     raw_id_fields = ('file', 'reply_to')
+     raw_id_fields = ('file_attachment', 'reply_to')

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)
Fix another broken file attachment field.
## Code Before:
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
    raw_id_fields = ('file', 'reply_to')

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)

## Instruction:
Fix another broken file attachment field.

## Code After:
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
    raw_id_fields = ('file_attachment', 'reply_to')

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _

from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import FileAttachmentComment


class FileAttachmentAdmin(admin.ModelAdmin):
    list_display = ('file', 'caption', 'mimetype', 'review_request_id')
    list_display_links = ('file_attachment', 'caption')
    search_fields = ('caption', 'mimetype')

    def review_request_id(self, obj):
        return obj.review_request.get().id
    review_request_id.short_description = _('Review request ID')


class FileAttachmentCommentAdmin(admin.ModelAdmin):
    list_display = ('text', 'file_attachment', 'review_request_id',
                    'timestamp')
    list_filter = ('timestamp',)
    search_fields = ('caption', 'file_attachment')
-     raw_id_fields = ('file', 'reply_to')
+     raw_id_fields = ('file_attachment', 'reply_to')
?                           +++++++++++

    def review_request_id(self, obj):
        return obj.review.get().review_request.id
    review_request_id.short_description = _('Review request ID')


admin.site.register(FileAttachment, FileAttachmentAdmin)
admin.site.register(FileAttachmentComment, FileAttachmentCommentAdmin)
e3c53133b71d7426695fbf24cac5b8e82311c037
seeker/middleware.py
seeker/middleware.py
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
        models.signals.post_save.connect(self.handle_save, dispatch_uid='handle_save')
        models.signals.post_delete.connect(self.handle_delete, dispatch_uid='handle_delete')

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
        models.signals.post_save.connect(self.handle_save, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_save')
        models.signals.post_delete.connect(self.handle_delete, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_delete')

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass
Make signal dispatch_uid values more specific
Make signal dispatch_uid values more specific
Python
bsd-2-clause
imsweb/django-seeker,imsweb/django-seeker
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
-         models.signals.post_save.connect(self.handle_save, dispatch_uid='handle_save')
+         models.signals.post_save.connect(self.handle_save, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_save')
-         models.signals.post_delete.connect(self.handle_delete, dispatch_uid='handle_delete')
+         models.signals.post_delete.connect(self.handle_delete, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_delete')

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass
Make signal dispatch_uid values more specific
## Code Before:
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
        models.signals.post_save.connect(self.handle_save, dispatch_uid='handle_save')
        models.signals.post_delete.connect(self.handle_delete, dispatch_uid='handle_delete')

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass

## Instruction:
Make signal dispatch_uid values more specific

## Code After:
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
        models.signals.post_save.connect(self.handle_save, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_save')
        models.signals.post_delete.connect(self.handle_delete, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_delete')

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass
from .utils import index, delete

from django.db import models

import logging
logger = logging.getLogger(__name__)


class ModelIndexingMiddleware (object):
    """
    Middleware class that automatically indexes any new or deleted model objects.
    """

    def __init__(self):
-         models.signals.post_save.connect(self.handle_save, dispatch_uid='handle_save')
+         models.signals.post_save.connect(self.handle_save, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_save')
?                                                                          ++++++++++++++++++++++++++++++++++++++++++
-         models.signals.post_delete.connect(self.handle_delete, dispatch_uid='handle_delete')
+         models.signals.post_delete.connect(self.handle_delete, dispatch_uid='seeker.middleware.ModelIndexingMiddleware.handle_delete')
?                                                                              ++++++++++++++++++++++++++++++++++++++++++

    def handle_save(self, sender, instance, **kwargs):
        try:
            index(instance)
        except:
            logger.exception('Error indexing %s instance: %s', sender, instance)

    def handle_delete(self, sender, instance, **kwargs):
        try:
            delete(instance)
        except:
            logger.exception('Error deleting %s instance: %s', sender, instance)

    def process_request(self, request):
        # This is really just here so Django keeps the middleware installed.
        pass
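Why the longer uids matter, in an illustrative sketch outside this commit (the signal and receivers are hypothetical): Django deduplicates connections by dispatch_uid, so if another app also connected with a generic uid like 'handle_save', one of the two receivers would be silently dropped.

import django.dispatch

sig = django.dispatch.Signal()

def seeker_handler(sender, **kwargs):
    pass

def other_handler(sender, **kwargs):
    pass

sig.connect(seeker_handler, dispatch_uid='handle_save')
sig.connect(other_handler, dispatch_uid='handle_save')                    # ignored: uid taken
sig.connect(other_handler, dispatch_uid='otherapp.handlers.handle_save')  # registered

print(len(sig.receivers))  # 2, not 3 -- the generic uid swallowed one connect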
e39bcde813d35c8079743fbed7e77f2c8e4b4596
examples/mainwindow.py
examples/mainwindow.py
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        uic.loadUi(_UI, self)

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = uic.loadUi(_UI, self)

        self.ui.actionLight.triggered.connect(self.lightTheme)
        self.ui.actionDark.triggered.connect(self.darkTheme)

    def lightTheme(self):
        qtmodern.styles.light(QApplication.instance())

    def darkTheme(self):
        qtmodern.styles.dark(QApplication.instance())

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())
Update example to switch between light and dark themes
Update example to switch between light and dark themes
Python
mit
gmarull/qtmodern
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
-         uic.loadUi(_UI, self)
+         self.ui = uic.loadUi(_UI, self)
+
+         self.ui.actionLight.triggered.connect(self.lightTheme)
+         self.ui.actionDark.triggered.connect(self.darkTheme)
+
+     def lightTheme(self):
+         qtmodern.styles.light(QApplication.instance())
+
+     def darkTheme(self):
+         qtmodern.styles.dark(QApplication.instance())

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())
Update example to switch between light and dark themes
## Code Before:
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        uic.loadUi(_UI, self)

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())

## Instruction:
Update example to switch between light and dark themes

## Code After:
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
        self.ui = uic.loadUi(_UI, self)

        self.ui.actionLight.triggered.connect(self.lightTheme)
        self.ui.actionDark.triggered.connect(self.darkTheme)

    def lightTheme(self):
        qtmodern.styles.light(QApplication.instance())

    def darkTheme(self):
        qtmodern.styles.dark(QApplication.instance())

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())
import sys
from os.path import join, dirname, abspath

from qtpy import uic
from qtpy.QtCore import Slot
from qtpy.QtWidgets import QApplication, QMainWindow, QMessageBox

import qtmodern.styles
import qtmodern.windows

_UI = join(dirname(abspath(__file__)), 'mainwindow.ui')


class MainWindow(QMainWindow):
    def __init__(self):
        QMainWindow.__init__(self)
-         uic.loadUi(_UI, self)
+         self.ui = uic.loadUi(_UI, self)
?         ++++++++++
+
+         self.ui.actionLight.triggered.connect(self.lightTheme)
+         self.ui.actionDark.triggered.connect(self.darkTheme)
+
+     def lightTheme(self):
+         qtmodern.styles.light(QApplication.instance())
+
+     def darkTheme(self):
+         qtmodern.styles.dark(QApplication.instance())

    @Slot()
    def on_pushButton_clicked(self):
        self.close()

    @Slot()
    def closeEvent(self, event):
        reply = QMessageBox.question(self, 'Exit', 'Do you want to exit?')

        if reply == QMessageBox.Yes:
            event.accept()
        else:
            event.ignore()


if __name__ == '__main__':
    app = QApplication(sys.argv)

    qtmodern.styles.dark(app)
    mw = qtmodern.windows.ModernWindow(MainWindow())
    mw.show()

    sys.exit(app.exec_())
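A self-contained variant of the same idea without the .ui file (the button stands in for the example's MainWindow and its menu actions), restyling the running QApplication from a signal handler exactly as the new lightTheme/darkTheme slots do:

import sys
from qtpy.QtWidgets import QApplication, QPushButton
import qtmodern.styles
import qtmodern.windows

app = QApplication(sys.argv)
qtmodern.styles.dark(app)

button = QPushButton('Switch to light theme')
button.clicked.connect(lambda: qtmodern.styles.light(QApplication.instance()))

mw = qtmodern.windows.ModernWindow(button)
mw.show()
sys.exit(app.exec_())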
f468ea8123768a3f66621bfecae20814fa83017b
website_sale_clear_line/controllers/main.py
website_sale_clear_line/controllers/main.py
from openerp.http import request
from openerp import http


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
        cr, uid, context, pool = (
            request.cr, request.uid, request.context, request.registry)
        pool['sale.order.line'].unlink(
            cr, uid, line_id, context=context)
from openerp.http import request
from openerp import http, SUPERUSER_ID


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
        cr, context, pool = (
            request.cr, request.context, request.registry)
        pool['sale.order.line'].unlink(
            cr, SUPERUSER_ID, line_id, context=context)
FIX website sale clear line
FIX website sale clear line
Python
agpl-3.0
ingadhoc/website
from openerp.http import request
- from openerp import http
+ from openerp import http, SUPERUSER_ID


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
-         cr, uid, context, pool = (
+         cr, context, pool = (
-             request.cr, request.uid, request.context, request.registry)
+             request.cr, request.context, request.registry)
        pool['sale.order.line'].unlink(
-             cr, uid, line_id, context=context)
+             cr, SUPERUSER_ID, line_id, context=context)
FIX website sale clear line
## Code Before:
from openerp.http import request
from openerp import http


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
        cr, uid, context, pool = (
            request.cr, request.uid, request.context, request.registry)
        pool['sale.order.line'].unlink(
            cr, uid, line_id, context=context)

## Instruction:
FIX website sale clear line

## Code After:
from openerp.http import request
from openerp import http, SUPERUSER_ID


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
        cr, context, pool = (
            request.cr, request.context, request.registry)
        pool['sale.order.line'].unlink(
            cr, SUPERUSER_ID, line_id, context=context)
from openerp.http import request
- from openerp import http
+ from openerp import http, SUPERUSER_ID
?                         ++++++++++++++


class pos_website_sale(http.Controller):

    @http.route(
        ['/shop/clear_cart_line'], type='json', auth="public", website=True)
    def clear_cart_line(self, line_id, **kw):
-         cr, uid, context, pool = (
?             -----
+         cr, context, pool = (
-             request.cr, request.uid, request.context, request.registry)
?                         -------------
+             request.cr, request.context, request.registry)
        pool['sale.order.line'].unlink(
-             cr, uid, line_id, context=context)
?                 ^^^
+             cr, SUPERUSER_ID, line_id, context=context)
?                 ^^^^^^^^^^^^
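The fix works because the public website visitor normally has no unlink rights on sale.order.line; running the unlink as SUPERUSER_ID bypasses that ACL for this one narrow operation. An isolated sketch in the same old-style Odoo 8 API (pool, cr, uid and line_id are placeholders supplied by the framework):

from openerp import SUPERUSER_ID

def drop_cart_line(pool, cr, uid, line_id, context=None):
    # pool['sale.order.line'].unlink(cr, uid, [line_id], context=context)
    #   -> would raise an AccessError when uid is the public website user
    pool['sale.order.line'].unlink(cr, SUPERUSER_ID, [line_id], context=context)

A safer variant would first check that line_id really belongs to the visitor's own cart before escalating privileges.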
8a2fb9001581f66babf59b062af266a1c332f175
debacl/__init__.py
debacl/__init__.py
import level_set_tree
import utils
from level_set_tree import construct_tree
from level_set_tree import construct_tree_from_graph
from level_set_tree import load_tree

from level_set_tree import LevelSetTree
Add tree constructors and LevelSetTree to the debacl namespace.
Add tree constructors and LevelSetTree to the debacl namespace.
Python
bsd-3-clause
CoAxLab/DeBaCl
- import level_set_tree
- import utils
+ from level_set_tree import construct_tree
+ from level_set_tree import construct_tree_from_graph
+ from level_set_tree import load_tree
+ from level_set_tree import LevelSetTree
+
Add tree constructors and LevelSetTree to the debacl namespace.
## Code Before:
import level_set_tree
import utils

## Instruction:
Add tree constructors and LevelSetTree to the debacl namespace.

## Code After:
from level_set_tree import construct_tree
from level_set_tree import construct_tree_from_graph
from level_set_tree import load_tree

from level_set_tree import LevelSetTree
- import level_set_tree
- import utils
+ from level_set_tree import construct_tree
+ from level_set_tree import construct_tree_from_graph
+ from level_set_tree import load_tree
+
+ from level_set_tree import LevelSetTree
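Usage enabled by the re-exports, as a sketch (the data and the k argument are illustrative; check the library docs for the full construct_tree signature):

import numpy as np
import debacl

X = np.random.rand(100, 2)
tree = debacl.construct_tree(X, k=10)   # previously debacl.level_set_tree.construct_tree
assert isinstance(tree, debacl.LevelSetTree)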
73c84754699a6f0803d0ceb3081988b45c9c76e7
contours/__init__.py
contours/__init__.py
"""Contour calculations.""" # Python 2 support # pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import from __future__ import (absolute_import, division, print_function, unicode_literals) from builtins import * from .core import numpy_formatter, matlab_formatter, shapely_formatter from .quad import QuadContourGenerator __version__ = '0.0.2.dev0'
"""Contour calculations.""" # Python 2 support from __future__ import absolute_import from .core import numpy_formatter, matlab_formatter, shapely_formatter from .quad import QuadContourGenerator __version__ = '0.0.2.dev0'
Remove unneeded Python 2.7 compatibility imports.
Remove unneeded Python 2.7 compatibility imports.
Python
mit
ccarocean/python-contours
"""Contour calculations.""" # Python 2 support - # pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import - from __future__ import (absolute_import, division, + from __future__ import absolute_import - print_function, unicode_literals) - from builtins import * from .core import numpy_formatter, matlab_formatter, shapely_formatter from .quad import QuadContourGenerator __version__ = '0.0.2.dev0'
Remove unneeded Python 2.7 compatibility imports.
## Code Before:
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division,
                        print_function, unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'

## Instruction:
Remove unneeded Python 2.7 compatibility imports.

## Code After:
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
"""Contour calculations."""

# Python 2 support
- # pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
- from __future__ import (absolute_import, division,
?                         -               -----------
+ from __future__ import absolute_import
-                         print_function, unicode_literals)
- from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
62494cd7125d498d8de058ab3ebe556cd9686f6e
calvin/runtime/north/plugins/coders/messages/msgpack_coder.py
calvin/runtime/north/plugins/coders/messages/msgpack_coder.py
import msgpack

from message_coder import MessageCoderBase


# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
        return msgpack.packb(data)

    def decode(self, data):
        data = msgpack.unpackb(data)
        return data
import umsgpack

from message_coder import MessageCoderBase

umsgpack.compatibility = True


# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
        return umsgpack.packb(data)

    def decode(self, data):
        data = umsgpack.unpackb(data)
        return data
Use umsgpack package for msgpack coder
coder/msgpack: Use umsgpack package for msgpack coder
Python
apache-2.0
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
- import msgpack
+ import umsgpack

from message_coder import MessageCoderBase

+
+ umsgpack.compatibility = True

# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
-         return msgpack.packb(data)
+         return umsgpack.packb(data)

    def decode(self, data):
-         data = msgpack.unpackb(data)
+         data = umsgpack.unpackb(data)
        return data
Use umsgpack package for msgpack coder
## Code Before:
import msgpack

from message_coder import MessageCoderBase


# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
        return msgpack.packb(data)

    def decode(self, data):
        data = msgpack.unpackb(data)
        return data

## Instruction:
Use umsgpack package for msgpack coder

## Code After:
import umsgpack

from message_coder import MessageCoderBase

umsgpack.compatibility = True


# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
        return umsgpack.packb(data)

    def decode(self, data):
        data = umsgpack.unpackb(data)
        return data
- import msgpack
+ import umsgpack
?        +

from message_coder import MessageCoderBase

+
+ umsgpack.compatibility = True

# set of functions to encode/decode data tokens to/from a json description
class MessageCoder(MessageCoderBase):

    def encode(self, data):
-         return msgpack.packb(data)
+         return umsgpack.packb(data)
?                +

    def decode(self, data):
-         data = msgpack.unpackb(data)
+         data = umsgpack.unpackb(data)
?                +
        return data
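What the new compatibility flag changes, sketched outside the coder class: with umsgpack.compatibility = True, u-msgpack-python packs both unicode and byte strings into the old msgpack "raw" family and unpacks raw back into byte strings, staying wire-compatible with peers that speak the original msgpack format (as the replaced msgpack module did).

import umsgpack

umsgpack.compatibility = True
payload = umsgpack.packb([u'token', b'data', 42])
print(umsgpack.unpackb(payload))  # [b'token', b'data', 42] -- both strings round-trip as raw/bytes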
d16988174f5570334b6b3986dbd0b35148566a62
opps/flatpages/models.py
opps/flatpages/models.py
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)

    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    short_url = models.URLField(
        _("Short URL"),
        null=True, blank=False,
    )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)

    def get_absolute_url(self):
        return "/page/{0}".format(self.slug)
    get_absolute_url.short_description = 'URL'

    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass
Add field short_url on flatpages model
Add field short_url on flatpages model
Python
mit
opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
+     short_url = models.URLField(
+         _("Short URL"),
+         null=True, blank=False,
+     )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)
+
+     def get_absolute_url(self):
+         return "/page/{0}".format(self.slug)
+     get_absolute_url.short_description = 'URL'

    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass
Add field short_url on flatpages model
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)

    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass

## Instruction:
Add field short_url on flatpages model

## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
    short_url = models.URLField(
        _("Short URL"),
        null=True, blank=False,
    )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)

    def get_absolute_url(self):
        return "/page/{0}".format(self.slug)
    get_absolute_url.short_description = 'URL'

    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass
from django.db import models
from django.utils.translation import ugettext_lazy as _

from googl.short import GooglUrlShort

from opps.core.models import Publishable, BaseConfig


class FlatPage(Publishable):

    title = models.CharField(_(u"Title"), max_length=140, db_index=True)
    headline = models.TextField(_(u"Headline"), blank=True, null=True)
    slug = models.SlugField(
        _(u"URL"),
        db_index=True,
        max_length=150,
        unique=True,
    )
+     short_url = models.URLField(
+         _("Short URL"),
+         null=True, blank=False,
+     )
    show_in_menu = models.BooleanField(_(u"Show in menu?"), default=False)
    main_image = models.ForeignKey(
        'images.Image',
        null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_(u'Main Image'),
    )

    content = models.TextField(_(u"Content"))
    order = models.IntegerField(_(u"Order"), default=0)
+     def get_absolute_url(self):
+         return "/page/{0}".format(self.slug)
+     get_absolute_url.short_description = 'URL'
+
    def __unicode__(self):
        return u"{0} - {1}".format(self.site.name, self.slug)


class FlatPageConfig(BaseConfig):
    """
    Default implementation
    """
    pass
c656ac5231d85e5f6a07688d3d7c2f4f07b3e154
backstage/settings/development.py
backstage/settings/development.py
import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } # Media handling MEDIA_ROOT = BASE_DIR.child("media") # Static file handling STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
Add MEDIA_ROOT to dev settings
Add MEDIA_ROOT to dev settings
Python
mit
mhotwagner/backstage,mhotwagner/backstage,mhotwagner/backstage
import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } + # Media handling + MEDIA_ROOT = BASE_DIR.child("media") + + # Static file handling STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
Add MEDIA_ROOT to dev settings
## Code Before: import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), ) ## Instruction: Add MEDIA_ROOT to dev settings ## Code After: import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } # Media handling MEDIA_ROOT = BASE_DIR.child("media") # Static file handling STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
import os import dj_database_url from .base import * DATABASE_URL = os.environ.get('DATABASE_URL') DATABASES = { 'default': dj_database_url.config() } + # Media handling + MEDIA_ROOT = BASE_DIR.child("media") + + # Static file handling STATIC_ROOT = 'static_sources' STATIC_URL = '/static/' STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), )
8541ec09e237f1401095d31177bdde9ac1adaa39
util/linkJS.py
util/linkJS.py
import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(source_base, source_fn, target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(module_base, module_fn, target) def addContents(source_base, source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(os.path.join(source_base, source_fn)) as source: for line in source: target.write(line)
import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(os.path.join(source_base, source_fn), target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(os.path.join(module_base, module_fn), target) def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(source_fn) as source: for line in source: target.write(line)
Include full path to original files
Include full path to original files
Python
mpl-2.0
MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz
import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": - addContents(source_base, source_fn, target) + addContents(os.path.join(source_base, source_fn), target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): - addContents(module_base, module_fn, target) + addContents(os.path.join(module_base, module_fn), target) - def addContents(source_base, source_fn, target): + def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") - with open(os.path.join(source_base, source_fn)) as source: + with open(source_fn) as source: for line in source: target.write(line)
Include full path to original files
## Code Before: import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(source_base, source_fn, target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(module_base, module_fn, target) def addContents(source_base, source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(os.path.join(source_base, source_fn)) as source: for line in source: target.write(line) ## Instruction: Include full path to original files ## Code After: import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": addContents(os.path.join(source_base, source_fn), target) # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): addContents(os.path.join(module_base, module_fn), target) def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") with open(source_fn) as source: for line in source: target.write(line)
import os def linkJS(target_fn, file_list_fn, source_base, prologue="", module_dirs=[]): with open(target_fn, "wb") as target: target.write(prologue) # Add files listed in file_list_fn with open(file_list_fn) as file_list: for source_fn in file_list: source_fn = source_fn.replace("/", os.path.sep).strip() if len(source_fn) > 0 and source_fn[0] != "#": - addContents(source_base, source_fn, target) + addContents(os.path.join(source_base, source_fn), target) ? +++++++++++++ + # Add all *.js files in module_dirs for module_base in module_dirs: for module_fn in os.listdir(module_base): if module_fn.endswith(".js"): - addContents(module_base, module_fn, target) + addContents(os.path.join(module_base, module_fn), target) ? +++++++++++++ + - def addContents(source_base, source_fn, target): ? ------------- + def addContents(source_fn, target): target.write("\n\n// " + source_fn + "\n\n") - with open(os.path.join(source_base, source_fn)) as source: + with open(source_fn) as source: for line in source: target.write(line)
4ccc5ea6cf25adb029f5e08cc0675e2b8415abdf
LayerView.py
LayerView.py
from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'color.frag')) self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLineLoop) def endRendering(self): pass
from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'vertexcolor.frag')) self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLines) def endRendering(self): pass
Support colours for rendering the layer view
Support colours for rendering the layer view
Python
agpl-3.0
markwal/Cura,DeskboxBrazil/Cura,ad1217/Cura,Curahelper/Cura,senttech/Cura,derekhe/Cura,fxtentacle/Cura,ynotstartups/Wanhao,ad1217/Cura,markwal/Cura,fxtentacle/Cura,Curahelper/Cura,quillford/Cura,hmflash/Cura,ynotstartups/Wanhao,fieldOfView/Cura,hmflash/Cura,totalretribution/Cura,quillford/Cura,lo0ol/Ultimaker-Cura,lo0ol/Ultimaker-Cura,senttech/Cura,fieldOfView/Cura,bq/Ultimaker-Cura,derekhe/Cura,DeskboxBrazil/Cura,bq/Ultimaker-Cura,totalretribution/Cura
from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: - self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'color.frag')) + self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'vertexcolor.frag')) self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue - renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLineLoop) + renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLines) def endRendering(self): pass
Support colours for rendering the layer view
## Code Before: from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'color.frag')) self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLineLoop) def endRendering(self): pass ## Instruction: Support colours for rendering the layer view ## Code After: from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'vertexcolor.frag')) self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLines) def endRendering(self): pass
from UM.View.View import View from UM.View.Renderer import Renderer from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator from UM.Resources import Resources class LayerView(View): def __init__(self): super().__init__() self._material = None def beginRendering(self): scene = self.getController().getScene() renderer = self.getRenderer() if not self._material: - self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'color.frag')) + self._material = renderer.createMaterial(Resources.getPath(Resources.ShadersLocation, 'basic.vert'), Resources.getPath(Resources.ShadersLocation, 'vertexcolor.frag')) ? ++++++ self._material.setUniformValue("u_color", [1.0, 0.0, 0.0, 1.0]) for node in DepthFirstIterator(scene.getRoot()): if not node.render(renderer): if node.getMeshData() and node.isVisible(): try: layerData = node.getMeshData().layerData except AttributeError: continue - renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLineLoop) ? ^^^^ + renderer.queueNode(node, mesh = layerData, material = self._material, mode = Renderer.RenderLines) ? ^ def endRendering(self): pass
dbbd6e1e87964db6b2279a661a63751da31213e5
millipede.py
millipede.py
class millipede: def __init__(self, size, comment=None): self._millipede = "" if comment: self._millipede = comment + "\n\n" self._millipede += " ╚⊙ ⊙╝ \n" padding = 2 direction = -1 while (size): for i in range(0, padding): self._millipede += " " self._millipede += "╚═(███)═╝\n" padding += direction if padding == 0: direction = 1 elif padding == 4: padding = 3 direction = -1 size -= 1 def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") args = parser.parse_args() print(millipede(args.size, args.comment))
class millipede: def __init__(self, size, comment=None, reverse=False): self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3] head = " ╔⊙ ⊙╗\n" if reverse else " ╚⊙ ⊙╝\n" body = "".join([ "{}{}\n".format( " " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9], "╔═(███)═╗" if reverse else "╚═(███)═╝" ) for x in range(size) ]) self._millipede = "" if reverse: self._millipede += body + head if comment: self._millipede += "\n" + comment else: if comment: self._millipede += comment + "\n\n" self._millipede += head + body def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede') args = parser.parse_args() print(millipede(args.size, comment=args.comment, reverse=args.reverse))
Rewrite body generation and add reverse option
Rewrite body generation and add reverse option
Python
bsd-3-clause
evadot/millipede-python,getmillipede/millipede-python,moul/millipede-python,EasonYi/millipede-python,EasonYi/millipede-python,evadot/millipede-python,moul/millipede-python,getmillipede/millipede-python
class millipede: + - def __init__(self, size, comment=None): + def __init__(self, size, comment=None, reverse=False): + self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3] + + head = " ╔⊙ ⊙╗\n" if reverse else " ╚⊙ ⊙╝\n" + body = "".join([ + "{}{}\n".format( + " " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9], + "╔═(███)═╗" if reverse else "╚═(███)═╝" + ) + for x in range(size) + ]) + self._millipede = "" + if reverse: + self._millipede += body + head - if comment: + if comment: + self._millipede += "\n" + comment + else: + if comment: - self._millipede = comment + "\n\n" + self._millipede += comment + "\n\n" - - self._millipede += " ╚⊙ ⊙╝ \n" - padding = 2 - direction = -1 - while (size): - for i in range(0, padding): - self._millipede += " " + self._millipede += head + body - self._millipede += "╚═(███)═╝\n" - padding += direction - - if padding == 0: - direction = 1 - elif padding == 4: - padding = 3 - direction = -1 - size -= 1 def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") + parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede') args = parser.parse_args() - print(millipede(args.size, args.comment)) + print(millipede(args.size, comment=args.comment, reverse=args.reverse))
Rewrite body generation and add reverse option
## Code Before: class millipede: def __init__(self, size, comment=None): self._millipede = "" if comment: self._millipede = comment + "\n\n" self._millipede += " ╚⊙ ⊙╝ \n" padding = 2 direction = -1 while (size): for i in range(0, padding): self._millipede += " " self._millipede += "╚═(███)═╝\n" padding += direction if padding == 0: direction = 1 elif padding == 4: padding = 3 direction = -1 size -= 1 def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") args = parser.parse_args() print(millipede(args.size, args.comment)) ## Instruction: Rewrite body generation and add reverse option ## Code After: class millipede: def __init__(self, size, comment=None, reverse=False): self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3] head = " ╔⊙ ⊙╗\n" if reverse else " ╚⊙ ⊙╝\n" body = "".join([ "{}{}\n".format( " " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9], "╔═(███)═╗" if reverse else "╚═(███)═╝" ) for x in range(size) ]) self._millipede = "" if reverse: self._millipede += body + head if comment: self._millipede += "\n" + comment else: if comment: self._millipede += comment + "\n\n" self._millipede += head + body def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede') args = parser.parse_args() print(millipede(args.size, comment=args.comment, reverse=args.reverse))
class millipede: + - def __init__(self, size, comment=None): + def __init__(self, size, comment=None, reverse=False): ? +++++++++++++++ + self._padding_offsets = [2, 1, 0, 1, 2, 3, 4, 4, 3] + + head = " ╔⊙ ⊙╗\n" if reverse else " ╚⊙ ⊙╝\n" + body = "".join([ + "{}{}\n".format( + " " * self._padding_offsets[(x + 3) % 9 if reverse else x % 9], + "╔═(███)═╗" if reverse else "╚═(███)═╝" + ) + for x in range(size) + ]) + self._millipede = "" + if reverse: + self._millipede += body + head - if comment: + if comment: ? ++++ + self._millipede += "\n" + comment + else: + if comment: - self._millipede = comment + "\n\n" + self._millipede += comment + "\n\n" ? ++++ + - - self._millipede += " ╚⊙ ⊙╝ \n" - padding = 2 - direction = -1 - while (size): - for i in range(0, padding): - self._millipede += " " ? ---- ^ ^ + self._millipede += head + body ? ^^^^ ^^^^^^ - self._millipede += "╚═(███)═╝\n" - padding += direction - - if padding == 0: - direction = 1 - elif padding == 4: - padding = 3 - direction = -1 - size -= 1 def __str__(self): return self._millipede if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='Millipede generator') parser.add_argument('size', metavar='s', type=int, help='the size of the millipede') parser.add_argument('comment', metavar='c', type=str, help='the comment', nargs="?") + parser.add_argument('-r', '--reverse', action='store_true', help='reverse the millipede') args = parser.parse_args() - print(millipede(args.size, args.comment)) + print(millipede(args.size, comment=args.comment, reverse=args.reverse)) ? ++++++++ ++++++++++++++++++++++
151c97a3a5cd0f9103c891ee9c60f3fe52fc3d12
test_suite.py
test_suite.py
import os os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management management.call_command('test', 'resources', 'forms', 'tokens')
import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = sys.argv[1:] if not apps: apps = [ 'resources', 'forms', 'tokens', ] management.call_command('test', *apps)
Allow apps to be specified from the command line
Allow apps to be specified from the command line
Python
bsd-2-clause
chop-dbhi/serrano,chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night
import os + import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management - management.call_command('test', 'resources', 'forms', 'tokens') + apps = sys.argv[1:] + + if not apps: + apps = [ + 'resources', + 'forms', + 'tokens', + ] + + management.call_command('test', *apps) +
Allow apps to be specified from the command line
## Code Before: import os os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management management.call_command('test', 'resources', 'forms', 'tokens') ## Instruction: Allow apps to be specified from the command line ## Code After: import os import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management apps = sys.argv[1:] if not apps: apps = [ 'resources', 'forms', 'tokens', ] management.call_command('test', *apps)
import os + import sys os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings' from django.core import management - management.call_command('test', 'resources', 'forms', 'tokens') + + apps = sys.argv[1:] + + if not apps: + apps = [ + 'resources', + 'forms', + 'tokens', + ] + + management.call_command('test', *apps)
89b9fb1cb14aeb99cb7c96717830898aead4fef1
src/waldur_core/core/management/commands/createstaffuser.py
src/waldur_core/core/management/commands/createstaffuser.py
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] user, created = User.objects.get_or_create( username=username, defaults=dict(last_login=timezone.now(), is_staff=True) ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) parser.add_argument('-e', '--email', dest='email', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] email = options['email'] user, created = User.objects.get_or_create( username=username, email=email, defaults=dict(last_login=timezone.now(), is_staff=True), ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
Allow setting email when creating a staff account.
Allow setting email when creating a staff account.

Otherwise it makes it hard to start using HomePort as it requires email validation.
Python
mit
opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) + parser.add_argument('-e', '--email', dest='email', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] + email = options['email'] user, created = User.objects.get_or_create( + username=username, + email=email, - username=username, defaults=dict(last_login=timezone.now(), is_staff=True) + defaults=dict(last_login=timezone.now(), is_staff=True), ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
Allow setting email when creating a staff account.
## Code Before: from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] user, created = User.objects.get_or_create( username=username, defaults=dict(last_login=timezone.now(), is_staff=True) ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username)) ## Instruction: Allow setting email when creating a staff account. ## Code After: from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) parser.add_argument('-e', '--email', dest='email', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] email = options['email'] user, created = User.objects.get_or_create( username=username, email=email, defaults=dict(last_login=timezone.now(), is_staff=True), ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand, CommandError from django.utils import timezone class Command(BaseCommand): help = "Create a user with a specified username and password. User will be created as staff." def add_arguments(self, parser): parser.add_argument('-u', '--username', dest='username', required=True) parser.add_argument('-p', '--password', dest='password', required=True) + parser.add_argument('-e', '--email', dest='email', required=True) def handle(self, *args, **options): User = get_user_model() username = options['username'] password = options['password'] + email = options['email'] user, created = User.objects.get_or_create( + username=username, + email=email, - username=username, defaults=dict(last_login=timezone.now(), is_staff=True) ? ------------------- + defaults=dict(last_login=timezone.now(), is_staff=True), ? + ) if not created: raise CommandError('Username %s is already taken.' % username) user.set_password(password) user.save() self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
ed542ea8979882e7cc245aee7e3c4a6cb6235a5f
HARK/tests/test_validators.py
HARK/tests/test_validators.py
import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1])
import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1])
Fix other tests with same regexp issue
Fix other tests with same regexp issue
Python
apache-2.0
econ-ark/HARK,econ-ark/HARK
import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass + if sys.version[0] == '2': - with self.assertRaisesRegex( + with self.assertRaisesRegexp( - TypeError, + TypeError, - 'Expected non-empty argument for parameter list_b', + 'Expected non-empty argument for parameter list_b', - ): + ): - foo([1], []) + foo([1], []) - with self.assertRaisesRegex( + with self.assertRaisesRegexp( - TypeError, + TypeError, - 'Expected non-empty argument for parameter list_a', + 'Expected non-empty argument for parameter list_a', - ): + ): - foo([], [1]) + foo([], [1]) + else: + with self.assertRaisesRegex( + TypeError, + 'Expected non-empty argument for parameter list_b', + ): + foo([1], []) + with self.assertRaisesRegex( + TypeError, + 'Expected non-empty argument for parameter list_a', + ): + foo([], [1])
Fix other tests with same regexp issue
## Code Before: import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) ## Instruction: Fix other tests with same regexp issue ## Code After: import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_b', ): foo([1], []) with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1])
import unittest, sys from HARK.validators import non_empty class ValidatorsTests(unittest.TestCase): ''' Tests for validator decorators which validate function arguments ''' def test_non_empty(self): @non_empty('list_a') def foo(list_a, list_b): pass try: foo([1], []) except Exception: self.fail() if sys.version[0] == '2': with self.assertRaisesRegexp( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) else: with self.assertRaisesRegex( TypeError, 'Expected non-empty argument for parameter list_a', ): foo([], [1]) @non_empty('list_a', 'list_b') def foo(list_a, list_b): pass + if sys.version[0] == '2': - with self.assertRaisesRegex( + with self.assertRaisesRegexp( ? ++++ + - TypeError, + TypeError, ? ++++ - 'Expected non-empty argument for parameter list_b', + 'Expected non-empty argument for parameter list_b', ? ++++ - ): + ): ? ++++ - foo([1], []) + foo([1], []) ? ++++ - with self.assertRaisesRegex( + with self.assertRaisesRegexp( ? ++++ + - TypeError, + TypeError, ? ++++ - 'Expected non-empty argument for parameter list_a', + 'Expected non-empty argument for parameter list_a', ? ++++ - ): + ): ? ++++ - foo([], [1]) + foo([], [1]) ? ++++ + else: + with self.assertRaisesRegex( + TypeError, + 'Expected non-empty argument for parameter list_b', + ): + foo([1], []) + with self.assertRaisesRegex( + TypeError, + 'Expected non-empty argument for parameter list_a', + ): + foo([], [1])
e317812daaae4ff1b50c7d56931425e86a7255b8
run_IRIDA_Uploader.py
run_IRIDA_Uploader.py
import wx from GUI.iridaUploaderMain import MainFrame if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop()
import wx from GUI.MainFrame import MainFrame if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop()
Use the right package name for running the uploader.
Use the right package name for running the uploader.
Python
apache-2.0
phac-nml/irida-miseq-uploader,phac-nml/irida-miseq-uploader
import wx - from GUI.iridaUploaderMain import MainFrame + from GUI.MainFrame import MainFrame if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop()
Use the right package name for running the uploader.
## Code Before: import wx from GUI.iridaUploaderMain import MainFrame if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop() ## Instruction: Use the right package name for running the uploader. ## Code After: import wx from GUI.MainFrame import MainFrame if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop()
import wx - from GUI.iridaUploaderMain import MainFrame ? ------------- + from GUI.MainFrame import MainFrame ? +++++ if __name__ == "__main__": app = wx.App(False) frame = MainFrame() frame.Show() frame.mp.api = frame.settings_frame.attempt_connect_to_api() app.MainLoop()
2b2a1848b398e59818ea7d3aa51bf7db6669917c
pytus2000/datadicts/__init__.py
pytus2000/datadicts/__init__.py
"""This subpackage contains all data dictionaries.""" # The Python source code gets auto-generated and this package is intentially empty. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
"""This subpackage contains all data dictionaries.""" # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
Update comment for auto generated files
Update comment for auto generated files
Python
mit
timtroendle/pytus2000
"""This subpackage contains all data dictionaries.""" - # The Python source code gets auto-generated and this package is intentially empty. + # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
Update comment for auto generated files
## Code Before: """This subpackage contains all data dictionaries.""" # The Python source code gets auto-generated and this package is intentially empty. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label ## Instruction: Update comment for auto generated files ## Code After: """This subpackage contains all data dictionaries.""" # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
"""This subpackage contains all data dictionaries.""" - # The Python source code gets auto-generated and this package is intentially empty. + # The Python source code in this package other than this file has been auto-generated. from enum import Enum class OrderedEnum(Enum): """An Enum whose members are ordered by their value.""" def __ge__(self, other): if self.__class__ is other.__class__: return self.value >= other.value return NotImplemented def __gt__(self, other): if self.__class__ is other.__class__: return self.value > other.value return NotImplemented def __le__(self, other): if self.__class__ is other.__class__: return self.value <= other.value return NotImplemented def __lt__(self, other): if self.__class__ is other.__class__: return self.value < other.value return NotImplemented class VariableEnum(OrderedEnum): """Contains all variables in a datadict. Parameters: * position: the position in the datadict (int) * label: the string describing the variable """ def __init__(self, position, label): self.position = position self.label = label
4fb39abc5afef5b0ca87e5c3b40e3dc9c9c0b2ef
tests/functions_tests/test_accuracy.py
tests/functions_tests/test_accuracy.py
import unittest import numpy import six import chainer from chainer import cuda from chainer import gradient_check from chainer.testing import attr if cuda.available: cuda.init() class TestAccuracy(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32) self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32) def check_forward(self, x_data, t_data): x = chainer.Variable(x_data) t = chainer.Variable(t_data) y = chainer.functions.accuracy(x, t) count = 0 for i in six.moves.range(self.t.size): pred = self.x[i].argmax() if pred == self.t[i]: count += 1 expected = float(count) / self.t.size gradient_check.assert_allclose(expected, cuda.to_cpu(y.data)) def test_forward_cpu(self): self.check_forward(self.x, self.t) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
import unittest import numpy import six import chainer from chainer import cuda from chainer import gradient_check from chainer.testing import attr if cuda.available: cuda.init() class TestAccuracy(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32) self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32) def check_forward(self, x_data, t_data): x = chainer.Variable(x_data) t = chainer.Variable(t_data) y = chainer.functions.accuracy(x, t) self.assertEqual((), y.data.shape) count = 0 for i in six.moves.range(self.t.size): pred = self.x[i].argmax() if pred == self.t[i]: count += 1 expected = float(count) / self.t.size gradient_check.assert_allclose(expected, cuda.to_cpu(y.data)) def test_forward_cpu(self): self.check_forward(self.x, self.t) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
Add test for shape of result of accuracy function
Add test for shape of result of accuracy function
Python
mit
elviswf/chainer,1986ks/chainer,cupy/cupy,ktnyt/chainer,hvy/chainer,tigerneil/chainer,rezoo/chainer,sou81821/chainer,tereka114/chainer,hvy/chainer,wavelets/chainer,jfsantos/chainer,chainer/chainer,ikasumi/chainer,okuta/chainer,aonotas/chainer,ysekky/chainer,kashif/chainer,ktnyt/chainer,niboshi/chainer,pfnet/chainer,ytoyama/yans_chainer_hackathon,cupy/cupy,jnishi/chainer,muupan/chainer,AlpacaDB/chainer,cemoody/chainer,chainer/chainer,anaruse/chainer,wkentaro/chainer,ronekko/chainer,kiyukuta/chainer,masia02/chainer,truongdq/chainer,kikusu/chainer,umitanuki/chainer,hidenori-t/chainer,cupy/cupy,t-abe/chainer,keisuke-umezawa/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,truongdq/chainer,jnishi/chainer,AlpacaDB/chainer,niboshi/chainer,laysakura/chainer,niboshi/chainer,okuta/chainer,benob/chainer,hvy/chainer,t-abe/chainer,keisuke-umezawa/chainer,ktnyt/chainer,kikusu/chainer,kuwa32/chainer,chainer/chainer,delta2323/chainer,wkentaro/chainer,woodshop/complex-chainer,tscohen/chainer,muupan/chainer,ktnyt/chainer,woodshop/chainer,wkentaro/chainer,bayerj/chainer,okuta/chainer,minhpqn/chainer,Kaisuke5/chainer,cupy/cupy,sinhrks/chainer,benob/chainer,okuta/chainer,wkentaro/chainer,hvy/chainer,tkerola/chainer,chainer/chainer,keisuke-umezawa/chainer,niboshi/chainer,sinhrks/chainer,yanweifu/chainer
import unittest import numpy import six import chainer from chainer import cuda from chainer import gradient_check from chainer.testing import attr if cuda.available: cuda.init() class TestAccuracy(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32) self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32) def check_forward(self, x_data, t_data): x = chainer.Variable(x_data) t = chainer.Variable(t_data) y = chainer.functions.accuracy(x, t) + self.assertEqual((), y.data.shape) count = 0 for i in six.moves.range(self.t.size): pred = self.x[i].argmax() if pred == self.t[i]: count += 1 expected = float(count) / self.t.size gradient_check.assert_allclose(expected, cuda.to_cpu(y.data)) def test_forward_cpu(self): self.check_forward(self.x, self.t) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
Add test for shape of result of accuracy function
## Code Before: import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)

        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1

        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))

## Instruction: Add test for shape of result of accuracy function
## Code After: import unittest

import numpy
import six

import chainer
from chainer import cuda
from chainer import gradient_check
from chainer.testing import attr

if cuda.available:
    cuda.init()


class TestAccuracy(unittest.TestCase):

    def setUp(self):
        self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32)
        self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32)

    def check_forward(self, x_data, t_data):
        x = chainer.Variable(x_data)
        t = chainer.Variable(t_data)
        y = chainer.functions.accuracy(x, t)
        self.assertEqual((), y.data.shape)

        count = 0
        for i in six.moves.range(self.t.size):
            pred = self.x[i].argmax()
            if pred == self.t[i]:
                count += 1

        expected = float(count) / self.t.size
        gradient_check.assert_allclose(expected, cuda.to_cpu(y.data))

    def test_forward_cpu(self):
        self.check_forward(self.x, self.t)

    @attr.gpu
    def test_forward_gpu(self):
        self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
import unittest import numpy import six import chainer from chainer import cuda from chainer import gradient_check from chainer.testing import attr if cuda.available: cuda.init() class TestAccuracy(unittest.TestCase): def setUp(self): self.x = numpy.random.uniform(-1, 1, (10, 3)).astype(numpy.float32) self.t = numpy.random.randint(3, size=(10,)).astype(numpy.int32) def check_forward(self, x_data, t_data): x = chainer.Variable(x_data) t = chainer.Variable(t_data) y = chainer.functions.accuracy(x, t) + self.assertEqual((), y.data.shape) count = 0 for i in six.moves.range(self.t.size): pred = self.x[i].argmax() if pred == self.t[i]: count += 1 expected = float(count) / self.t.size gradient_check.assert_allclose(expected, cuda.to_cpu(y.data)) def test_forward_cpu(self): self.check_forward(self.x, self.t) @attr.gpu def test_forward_gpu(self): self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.t))
f8b4b1a860b5c0a3ff16dbb8bbf83010bd9a1009
feincms3/plugins/__init__.py
feincms3/plugins/__init__.py
from . import html from . import snippet try: from . import external except ImportError: # pragma: no cover pass try: from . import image except ImportError: # pragma: no cover pass try: from . import richtext except ImportError: # pragma: no cover pass try: from . import versatileimage except ImportError: # pragma: no cover pass
from . import html from . import snippet try: import requests except ImportError: # pragma: no cover pass else: from . import external try: import imagefield except ImportError: # pragma: no cover pass else: from . import image try: import feincms3.cleanse except ImportError: # pragma: no cover pass else: from . import richtext try: import versatileimagefield except ImportError: # pragma: no cover pass else: from . import versatileimage
Stop hiding local import errors
feincms3.plugins: Stop hiding local import errors
Python
bsd-3-clause
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
from . import html from . import snippet try: - from . import external + import requests except ImportError: # pragma: no cover pass + else: + from . import external try: - from . import image + import imagefield except ImportError: # pragma: no cover pass + else: + from . import image try: - from . import richtext + import feincms3.cleanse except ImportError: # pragma: no cover pass + else: + from . import richtext try: - from . import versatileimage + import versatileimagefield except ImportError: # pragma: no cover pass + else: + from . import versatileimage
Stop hiding local import errors
## Code Before: from . import html from . import snippet try: from . import external except ImportError: # pragma: no cover pass try: from . import image except ImportError: # pragma: no cover pass try: from . import richtext except ImportError: # pragma: no cover pass try: from . import versatileimage except ImportError: # pragma: no cover pass ## Instruction: Stop hiding local import errors ## Code After: from . import html from . import snippet try: import requests except ImportError: # pragma: no cover pass else: from . import external try: import imagefield except ImportError: # pragma: no cover pass else: from . import image try: import feincms3.cleanse except ImportError: # pragma: no cover pass else: from . import richtext try: import versatileimagefield except ImportError: # pragma: no cover pass else: from . import versatileimage
from . import html from . import snippet try: - from . import external + import requests except ImportError: # pragma: no cover pass + else: + from . import external try: - from . import image + import imagefield except ImportError: # pragma: no cover pass + else: + from . import image try: - from . import richtext + import feincms3.cleanse except ImportError: # pragma: no cover pass + else: + from . import richtext try: - from . import versatileimage ? ------- + import versatileimagefield ? +++++ except ImportError: # pragma: no cover pass + else: + from . import versatileimage
9dad4033e4a66208ca00bcb0340f6a2271f1090f
montage_wrapper/mpi.py
montage_wrapper/mpi.py
MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc)
MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ global MPI_COMMAND MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc)
Fix setting of custom MPI command
Fix setting of custom MPI command
Python
bsd-3-clause
vterron/montage-wrapper,astrofrog/montage-wrapper,astropy/montage-wrapper,astrofrog/montage-wrapper,jat255/montage-wrapper
MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ + global MPI_COMMAND MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc)
Fix setting of custom MPI command
## Code Before: MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc) ## Instruction: Fix setting of custom MPI command ## Code After: MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ global MPI_COMMAND MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc)
MPI_COMMAND = 'mpirun -n {n_proc} {executable}' def set_mpi_command(command): """ Set the MPI Command to use. This should contain {n_proc} to indicate the number of processes, and {executable} to indicate the name of the executable. Parameters ---------- command: str The MPI command for running executables Examples -------- Use ``mpirun``: >>> set_mpi_command('mpirun -n {n_proc} {executable}') Use ``mpiexec`` with host list: >>> set_mpi_command('mpiexec -f mpd.hosts -np {n_proc} {executable}') """ + global MPI_COMMAND MPI_COMMAND = command def _get_mpi_command(executable=None, n_proc=None): return MPI_COMMAND.format(executable=executable, n_proc=n_proc)
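The one-line fix works because of Python's scoping rules: an assignment inside a function binds a function-local name unless the name is declared global, so the original set_mpi_command silently rebound a local MPI_COMMAND and left the module-level default untouched. A self-contained demonstration of the difference (the setter names here are placeholders, not the montage_wrapper API):

MPI_COMMAND = "mpirun -n {n_proc} {executable}"

def set_broken(command):
    MPI_COMMAND = command  # binds a new local name only

def set_fixed(command):
    global MPI_COMMAND     # rebinds the module-level name
    MPI_COMMAND = command

set_broken("mpiexec -np {n_proc} {executable}")
assert MPI_COMMAND.startswith("mpirun")   # the broken setter changed nothing
set_fixed("mpiexec -np {n_proc} {executable}")
assert MPI_COMMAND.startswith("mpiexec")  # the fixed setter took effect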
263e31a5d87b8134a25df97eee06f1fe9c1e94bc
django_countries/release.py
django_countries/release.py
import os from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd)
import os import re import shutil from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers fix_locale_paths() _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd) def fix_locale_paths(): lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale") for name in os.listdir(lpath): if re.match(r"\w\w-\w{3}", name): new_path = os.path.join(lpath, name.replace("-", "_", 1)) if os.path.exists(new_path): shutil.rmtree(new_path) os.rename(os.path.join(lpath, name), new_path)
Fix locale paths when pulling from transifex
Fix locale paths when pulling from transifex
Python
mit
SmileyChris/django-countries
import os + import re + import shutil from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers - + fix_locale_paths() _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd) + + def fix_locale_paths(): + lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale") + for name in os.listdir(lpath): + if re.match(r"\w\w-\w{3}", name): + new_path = os.path.join(lpath, name.replace("-", "_", 1)) + if os.path.exists(new_path): + shutil.rmtree(new_path) + os.rename(os.path.join(lpath, name), new_path) +
Fix locale paths when pulling from transifex
## Code Before: import os from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd) ## Instruction: Fix locale paths when pulling from transifex ## Code After: import os import re import shutil from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers fix_locale_paths() _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd) def fix_locale_paths(): lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale") for name in os.listdir(lpath): if re.match(r"\w\w-\w{3}", name): new_path = os.path.join(lpath, name.replace("-", "_", 1)) if os.path.exists(new_path): shutil.rmtree(new_path) os.rename(os.path.join(lpath, name), new_path)
import os + import re + import shutil from txclib.commands import cmd_pull from txclib.utils import find_dot_tx from txclib.log import logger from zest.releaser.utils import ask, execute_command from django.core.management import call_command import django_countries def translations(data): if data["name"] != "django-countries": return if not ask("Pull translations from transifex and compile", default=True): return _handlers = logger.handlers logger.handlers = [] try: cmd_pull(argv=["-a", "--minimum-perc=60"], path_to_tx=find_dot_tx()) finally: logger.handlers = _handlers - + fix_locale_paths() _cwd = os.getcwd() os.chdir(os.path.dirname(django_countries.__file__)) try: call_command("compilemessages") execute_command(["git", "add", "locale"]) finally: os.chdir(_cwd) + + + def fix_locale_paths(): + lpath = os.path.join(os.path.dirname(django_countries.__file__), "locale") + for name in os.listdir(lpath): + if re.match(r"\w\w-\w{3}", name): + new_path = os.path.join(lpath, name.replace("-", "_", 1)) + if os.path.exists(new_path): + shutil.rmtree(new_path) + os.rename(os.path.join(lpath, name), new_path)
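For context on fix_locale_paths: Transifex delivers language-plus-script codes with a hyphen (for example sr-Latn), while gettext and Django's compilemessages expect underscore-style directories (sr_Latn). The regex selects a two-letter language code, a hyphen, and a three-letter script code, and only the first hyphen is replaced. A standalone check of that logic, using illustrative sample names rather than a real locale tree:

import re

for name in ["sr-Latn", "zh-Hans", "pt-BR", "de"]:
    if re.match(r"\w\w-\w{3}", name):
        print(name, "->", name.replace("-", "_", 1))
# prints: sr-Latn -> sr_Latn and zh-Hans -> zh_Hans;
# "pt-BR" (two-letter region) and "de" do not match and are left alone.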
5a12f027079d109228456c6f3e4912317721246a
setup.py
setup.py
from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], )
from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7'], )
Declare that cyrtranslit supports Python 3.7
Declare that cyrtranslit supports Python 3.7
Python
mit
opendatakosovo/cyrillic-transliteration
from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6'], + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7'], )
Declare that cyrtranslit supports Python 3.7
## Code Before: from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], ) ## Instruction: Declare that cyrtranslit supports Python 3.7 ## Code After: from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7'], )
from distutils.core import setup setup( name='cyrtranslit', packages=['cyrtranslit'], version='0.4', description='Bi-directional Cyrillic transliteration. Transliterate Cyrillic script text to Roman alphabet text and vice versa.', author='Open Data Kosovo', author_email='[email protected]', url='https://github.com/opendatakosovo/cyrillic-transliteration', download_url='https://github.com/opendatakosovo/cyrillic-transliteration/archive/v0.4.tar.gz', license='MIT', long_description='Transliteration is the conversion of a text from one script to another. Current version supports transliteration for Serbian, Macedonian, Montenegrin, and Russian.', keywords=['cyrillic', 'latin', 'transliteration', 'transliterate', 'cyrtranslit', 'serbian', 'macedonian', 'montenegrin', 'russian'], classifiers=['Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6'], ? - + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7'], )
a175dbf2f239690cb5128698d5896233467e285e
huxley/settings/pipeline.py
huxley/settings/pipeline.py
from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( 'css/*.css', 'scss/core/*.scss', 'scss/accounts/*.scss', 'scss/advisors/*.scss', 'scss/chairs/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( 'scss/core/*.scss', 'scss/accounts/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
Clean up file patterns in PIPELINE_CSS setting.
Clean up file patterns in PIPELINE_CSS setting.
Python
bsd-3-clause
ctmunwebmaster/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,bmun/huxley,bmun/huxley,nathanielparke/huxley,nathanielparke/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,bmun/huxley,ctmunwebmaster/huxley,bmun/huxley
from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( - 'css/*.css', 'scss/core/*.scss', 'scss/accounts/*.scss', - 'scss/advisors/*.scss', - 'scss/chairs/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
Clean up file patterns in PIPELINE_CSS setting.
## Code Before: from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( 'css/*.css', 'scss/core/*.scss', 'scss/accounts/*.scss', 'scss/advisors/*.scss', 'scss/chairs/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify' ## Instruction: Clean up file patterns in PIPELINE_CSS setting. ## Code After: from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( 'scss/core/*.scss', 'scss/accounts/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
from os.path import join from .roots import PROJECT_ROOT PIPELINE_COMPILERS = ( 'huxley.utils.pipeline.PySCSSCompiler', 'pipeline_browserify.compiler.BrowserifyCompiler', ) PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor' PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS = { 'huxley': { 'source_filenames': ( - 'css/*.css', 'scss/core/*.scss', 'scss/accounts/*.scss', - 'scss/advisors/*.scss', - 'scss/chairs/*.scss', ), 'output_filename': 'css/huxley.css' }, } PIPELINE_JS = { 'huxley': { 'source_filenames': ( 'js/huxley.browserify.js', ), 'output_filename': 'js/huxley.js' } } PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify') PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
04d7e76cf372802e99ff3108cccd836d7aada0df
games/views/installers.py
games/views/installers.py
from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): print "InstallerRevisionListView" installer_id = self.request.parser_context['kwargs']['pk'] versions = [] for version in Version.objects.filter(content_type__model='installer', object_id=installer_id): versions.append(models.InstallerRevision(version.id)) return versions class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] version = models.InstallerRevision(revision_id) return version
from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): installer_id = self.request.parser_context['kwargs']['pk'] return [ models.InstallerRevision(version.id) for version in Version.objects.filter( content_type__model='installer', object_id=installer_id ) ] class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] return models.InstallerRevision(revision_id)
Simplify Installer revision API views
Simplify Installer revision API views
Python
agpl-3.0
lutris/website,lutris/website,Turupawn/website,lutris/website,Turupawn/website,lutris/website,Turupawn/website,Turupawn/website
from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): - print "InstallerRevisionListView" installer_id = self.request.parser_context['kwargs']['pk'] + return [ - versions = [] - for version in Version.objects.filter(content_type__model='installer', - object_id=installer_id): - versions.append(models.InstallerRevision(version.id)) + models.InstallerRevision(version.id) - return versions + for version + in Version.objects.filter( + content_type__model='installer', object_id=installer_id + ) + ] class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] - version = models.InstallerRevision(revision_id) + return models.InstallerRevision(revision_id) - return version
Simplify Installer revision API views
## Code Before: from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): print "InstallerRevisionListView" installer_id = self.request.parser_context['kwargs']['pk'] versions = [] for version in Version.objects.filter(content_type__model='installer', object_id=installer_id): versions.append(models.InstallerRevision(version.id)) return versions class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] version = models.InstallerRevision(revision_id) return version ## Instruction: Simplify Installer revision API views ## Code After: from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): installer_id = self.request.parser_context['kwargs']['pk'] return [ models.InstallerRevision(version.id) for version in Version.objects.filter( content_type__model='installer', object_id=installer_id ) ] class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] return models.InstallerRevision(revision_id)
from __future__ import absolute_import from rest_framework import generics from reversion.models import Version from common.permissions import IsAdminOrReadOnly from games import models, serializers class InstallerListView(generics.ListAPIView): serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerDetailView(generics.RetrieveUpdateDestroyAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerSerializer queryset = models.Installer.objects.all() class InstallerRevisionListView(generics.ListAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_queryset(self): - print "InstallerRevisionListView" installer_id = self.request.parser_context['kwargs']['pk'] + return [ - versions = [] - for version in Version.objects.filter(content_type__model='installer', - object_id=installer_id): - versions.append(models.InstallerRevision(version.id)) ? ---------------- - + models.InstallerRevision(version.id) - return versions + for version + in Version.objects.filter( + content_type__model='installer', object_id=installer_id + ) + ] class InstallerRevisionDetailView(generics.RetrieveAPIView): permission_classes = [IsAdminOrReadOnly] serializer_class = serializers.InstallerRevisionSerializer def get_object(self): revision_id = self.request.parser_context['kwargs']['pk'] - version = models.InstallerRevision(revision_id) ? ^ --- -- + return models.InstallerRevision(revision_id) ? ^ ++ - return version
fce5152e8d902821c9b521402667ac87f9e9a17b
checks.d/system_core.py
checks.d/system_core.py
import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] )
import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) self.gauge("system.core.count", len(cpu_times)) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] )
Send the core count as a metric
Send the core count as a metric
Python
bsd-3-clause
truthbk/dd-agent,tebriel/dd-agent,indeedops/dd-agent,mderomph-coolblue/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,joelvanvelden/dd-agent,indeedops/dd-agent,mderomph-coolblue/dd-agent,jyogi/purvar-agent,jyogi/purvar-agent,manolama/dd-agent,tebriel/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,joelvanvelden/dd-agent,a20012251/dd-agent,jraede/dd-agent,AniruddhaSAtre/dd-agent,brettlangdon/dd-agent,polynomial/dd-agent,gphat/dd-agent,urosgruber/dd-agent,c960657/dd-agent,darron/dd-agent,jvassev/dd-agent,Shopify/dd-agent,jyogi/purvar-agent,yuecong/dd-agent,darron/dd-agent,gphat/dd-agent,brettlangdon/dd-agent,eeroniemi/dd-agent,zendesk/dd-agent,JohnLZeller/dd-agent,manolama/dd-agent,citrusleaf/dd-agent,Wattpad/dd-agent,benmccann/dd-agent,huhongbo/dd-agent,c960657/dd-agent,indeedops/dd-agent,lookout/dd-agent,Mashape/dd-agent,darron/dd-agent,polynomial/dd-agent,amalakar/dd-agent,guruxu/dd-agent,benmccann/dd-agent,pmav99/praktoras,zendesk/dd-agent,eeroniemi/dd-agent,jraede/dd-agent,yuecong/dd-agent,pmav99/praktoras,cberry777/dd-agent,jvassev/dd-agent,manolama/dd-agent,AntoCard/powerdns-recursor_check,tebriel/dd-agent,jamesandariese/dd-agent,ess/dd-agent,relateiq/dd-agent,joelvanvelden/dd-agent,joelvanvelden/dd-agent,yuecong/dd-agent,brettlangdon/dd-agent,brettlangdon/dd-agent,jamesandariese/dd-agent,guruxu/dd-agent,Mashape/dd-agent,remh/dd-agent,jvassev/dd-agent,huhongbo/dd-agent,relateiq/dd-agent,PagerDuty/dd-agent,guruxu/dd-agent,jshum/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,jshum/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,jraede/dd-agent,mderomph-coolblue/dd-agent,c960657/dd-agent,lookout/dd-agent,Shopify/dd-agent,tebriel/dd-agent,JohnLZeller/dd-agent,takus/dd-agent,jamesandariese/dd-agent,a20012251/dd-agent,Wattpad/dd-agent,Wattpad/dd-agent,Mashape/dd-agent,takus/dd-agent,jamesandariese/dd-agent,a20012251/dd-agent,PagerDuty/dd-agent,truthbk/dd-agent,urosgruber/dd-agent,AniruddhaSAtre/dd-agent,jshum/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,a20012251/dd-agent,Wattpad/dd-agent,indeedops/dd-agent,citrusleaf/dd-agent,truthbk/dd-agent,jraede/dd-agent,brettlangdon/dd-agent,amalakar/dd-agent,relateiq/dd-agent,GabrielNicolasAvellaneda/dd-agent,Wattpad/dd-agent,gphat/dd-agent,yuecong/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,jraede/dd-agent,mderomph-coolblue/dd-agent,c960657/dd-agent,lookout/dd-agent,Shopify/dd-agent,tebriel/dd-agent,AntoCard/powerdns-recursor_check,AniruddhaSAtre/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,jyogi/purvar-agent,tebriel/dd-agent,JohnLZeller/dd-agent,takus/dd-agent,yuecong/dd-agent,Shopify/dd-agent,gphat/dd-agent,indeedops/dd-agent,relateiq/dd-agent,takus/dd-agent,joelvanvelden/dd-agent,joelvanvelden/dd-agent,GabrielNicolasAvellaneda/dd-agent,remh/dd-agent,zendesk/dd-agent,polynomial/dd-agent,cberry777/dd-agent,guruxu/dd-agent,packetloop/dd-agent,c960657/dd-agent,citrusleaf/dd-agent,pmav99/praktoras,manolama/dd-agent,amalakar/dd-agent,lookout/dd-agent,darron/dd-agent,lookout/dd-agent,c960657/dd-agent,GabrielNicolasAvellaneda/dd-agent,polynomial/dd-agent,ess/dd-agent,AntoCard/powerdns-recursor_check,jyogi/purvar-agent,JohnLZeller/dd-agent,manolama/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,benmccann/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,cberry777/dd-agent,GabrielNicolasAvellaneda/dd-agent,jvassev/dd-agent,PagerDuty/dd-agent,citrusleaf/dd-agent,eeroniemi/dd-agent,eeroniemi/dd-agent,urosgruber/dd-agent,guruxu/dd-agent,a20012251/dd-agent,lookout/dd-agent,AntoCard/powerdns-recursor_check,jraede/dd-agent,remh/dd-agent,jshum/dd-agent,pmav99/praktoras,relateiq/dd-agent,remh/dd-agent,pfmooney/dd-agent,oneandoneis2/dd-agent,zendesk/dd-agent,Mashape/dd-agent,oneandoneis2/dd-agent,pmav99/praktoras,benmccann/dd-agent,jshum/dd-agent,mderomph-coolblue/dd-agent,ess/dd-agent,oneandoneis2/dd-agent,mderomph-coolblue/dd-agent,remh/dd-agent,Shopify/dd-agent,ess/dd-agent,jshum/dd-agent,Mashape/dd-agent,packetloop/dd-agent,jvassev/dd-agent,gphat/dd-agent,pfmooney/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,zendesk/dd-agent,pfmooney/dd-agent,takus/dd-agent,pfmooney/dd-agent,amalakar/dd-agent,pfmooney/dd-agent,JohnLZeller/dd-agent,polynomial/dd-agent,truthbk/dd-agent,urosgruber/dd-agent,citrusleaf/dd-agent,Shopify/dd-agent,GabrielNicolasAvellaneda/dd-agent,benmccann/dd-agent,jamesandariese/dd-agent,jamesandariese/dd-agent
import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) + self.gauge("system.core.count", len(cpu_times)) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] )
Send the core count as a metric
## Code Before: import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] ) ## Instruction: Send the core count as a metric ## Code After: import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) self.gauge("system.core.count", len(cpu_times)) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] )
import psutil from checks import AgentCheck class SystemCore(AgentCheck): def check(self, instance): cpu_times = psutil.cpu_times(percpu=True) + self.gauge("system.core.count", len(cpu_times)) for i, cpu in enumerate(cpu_times): for key, value in cpu._asdict().iteritems(): self.rate( "system.core.{0}".format(key), 100.0 * value, tags=["core:{0}".format(i)] )
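The added metric relies on psutil.cpu_times(percpu=True) returning one named tuple per logical core, so the length of that list doubles as the core count. A small Python 3 sketch of the same idea, runnable wherever psutil is installed (the record's check is Python 2, hence iteritems; items is the Python 3 equivalent):

import psutil

per_cpu = psutil.cpu_times(percpu=True)
print("system.core.count =", len(per_cpu))
for i, cpu in enumerate(per_cpu):
    for key, value in cpu._asdict().items():  # named tuple -> dict of counters
        print("system.core.{0}".format(key), 100.0 * value, "core:{0}".format(i))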
9f17fc03a79434b3d92e4dea00ea33567c806280
runner/update_manifest.py
runner/update_manifest.py
import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): manifest_path = os.path.join(root, "MANIFEST.json") manifest.update_manifest(root, **{"rebuild": False, "local_changes": True, "path": manifest_path}) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): path = os.path.join(root, "MANIFEST.json") manifest_file = manifest.load(path) manifest.update(root, "/", manifest_file) manifest.write(manifest_file, path) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
Update test runner for changes in the manifest API.
Update test runner for changes in the manifest API.
Python
bsd-3-clause
frewsxcv/wpt-tools,wpt-on-tv-tf/wpt-tools,wpt-on-tv-tf/wpt-tools,frewsxcv/wpt-tools,kaixinjxq/wpt-tools,UprootStaging/wpt-tools,UprootStaging/wpt-tools,wpt-on-tv-tf/wpt-tools,kaixinjxq/wpt-tools,vivliostyle/wpt-tools,UprootStaging/wpt-tools,frewsxcv/wpt-tools,vivliostyle/wpt-tools,kaixinjxq/wpt-tools,vivliostyle/wpt-tools
import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): - manifest_path = os.path.join(root, "MANIFEST.json") + path = os.path.join(root, "MANIFEST.json") - manifest.update_manifest(root, **{"rebuild": False, - "local_changes": True, - "path": manifest_path}) + manifest_file = manifest.load(path) + manifest.update(root, "/", manifest_file) + manifest.write(manifest_file, path) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
Update test runner for changes in the manifest API.
## Code Before: import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): manifest_path = os.path.join(root, "MANIFEST.json") manifest.update_manifest(root, **{"rebuild": False, "local_changes": True, "path": manifest_path}) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"}) ## Instruction: Update test runner for changes in the manifest API. ## Code After: import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): path = os.path.join(root, "MANIFEST.json") manifest_file = manifest.load(path) manifest.update(root, "/", manifest_file) manifest.write(manifest_file, path) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
import json import os import sys here = os.path.abspath(os.path.split(__file__)[0]) root = os.path.abspath(os.path.join(here, "..", "..")) sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts"))) import manifest def main(request, response): - manifest_path = os.path.join(root, "MANIFEST.json") ? --------- + path = os.path.join(root, "MANIFEST.json") - manifest.update_manifest(root, **{"rebuild": False, - "local_changes": True, - "path": manifest_path}) + manifest_file = manifest.load(path) + manifest.update(root, "/", manifest_file) + manifest.write(manifest_file, path) return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
a138d7126acd1418e4bec47aeecf5a96076d1acf
djangae/contrib/backup/urls.py
djangae/contrib/backup/urls.py
from django.conf.urls import include, url from . import views urlpatterns = ( url( '^create-datastore-backup$', views.create_datastore_backup, name="create_datastore_backup" ), )
from django.conf.urls import url from . import views urlpatterns = ( url( '^create-datastore-backup/?$', views.create_datastore_backup, name="create_datastore_backup" ), )
Fix the backup url to match the docs (and retain backwards compatibility)
Fix the backup url to match the docs (and retain backwards compatibility)
Python
bsd-3-clause
potatolondon/djangae,potatolondon/djangae
- from django.conf.urls import include, url + from django.conf.urls import url from . import views urlpatterns = ( url( - '^create-datastore-backup$', + '^create-datastore-backup/?$', views.create_datastore_backup, name="create_datastore_backup" ), )
Fix the backup url to match the docs (and retain backwards compatibility)
## Code Before: from django.conf.urls import include, url from . import views urlpatterns = ( url( '^create-datastore-backup$', views.create_datastore_backup, name="create_datastore_backup" ), ) ## Instruction: Fix the backup url to match the docs (and retain backwards compatibility) ## Code After: from django.conf.urls import url from . import views urlpatterns = ( url( '^create-datastore-backup/?$', views.create_datastore_backup, name="create_datastore_backup" ), )
- from django.conf.urls import include, url ? --------- + from django.conf.urls import url from . import views urlpatterns = ( url( - '^create-datastore-backup$', + '^create-datastore-backup/?$', ? ++ views.create_datastore_backup, name="create_datastore_backup" ), )
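The backwards compatibility comes from the added /? in the URL pattern, which makes the trailing slash optional so the documented URL and the old one resolve to the same view. The regex behaviour can be checked in isolation with the standard library:

import re

pattern = re.compile(r"^create-datastore-backup/?$")
assert pattern.match("create-datastore-backup")    # documented form
assert pattern.match("create-datastore-backup/")   # previous form
assert not pattern.match("create-datastore-backup//")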
9f5418e5b755232e12ea18e85b131dbd04c74587
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/postprocessing_pickle.py
benchmarks_sphere/paper_jrn_parco_rexi_nonlinear/postprocessing_pickle.py
import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv")
import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * from mule.exec_program import * # Ugly hack! #output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"') #if retval != 0: # print(output) # raise Exception("Something went wrong") #output = output.replace("\n", '') #output = output.replace("\r", '') #p = pickle_SphereDataPhysicalDiff(output) p = pickle_SphereDataPhysicalDiff()
Make postprocess pickling generic to various reference files
Make postprocess pickling generic to various reference files
Python
mit
schreiberx/sweet,schreiberx/sweet,schreiberx/sweet,schreiberx/sweet
import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * + from mule.exec_program import * - p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv") + # Ugly hack! + #output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"') + #if retval != 0: + # print(output) + # raise Exception("Something went wrong") + #output = output.replace("\n", '') + #output = output.replace("\r", '') + #p = pickle_SphereDataPhysicalDiff(output) + p = pickle_SphereDataPhysicalDiff() +
Make postprocess pickling generic to various reference files
## Code Before: import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv") ## Instruction: Make postprocess pickling generic to various reference files ## Code After: import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * from mule.exec_program import * # Ugly hack! #output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"') #if retval != 0: # print(output) # raise Exception("Something went wrong") #output = output.replace("\n", '') #output = output.replace("\r", '') #p = pickle_SphereDataPhysicalDiff(output) p = pickle_SphereDataPhysicalDiff()
import sys import math import glob from sweet.postprocessing.pickle_SphereDataPhysicalDiff import * + from mule.exec_program import * - p = pickle_SphereDataPhysicalDiff("_t00000000120.00000000.csv") + # Ugly hack! + #output, retval = exec_program('ls *benchref*/*prog_h* | sort | tail -n 1 | sed "s/.*prog_h//"') + #if retval != 0: + # print(output) + # raise Exception("Something went wrong") + #output = output.replace("\n", '') + #output = output.replace("\r", '') + + #p = pickle_SphereDataPhysicalDiff(output) + p = pickle_SphereDataPhysicalDiff()
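The commented-out block hints at the intended generalisation: pick the newest reference output on disk and derive the filename suffix from it instead of hard-coding a timestamp. A hypothetical pure-Python equivalent of that shell pipeline, shown only as a sketch (the glob pattern mirrors the comment and is not part of the mule API):

import glob

candidates = sorted(glob.glob("*benchref*/*prog_h*"))
if candidates:
    # e.g. ".../prog_h_t00000000120.00000000.csv" -> "_t00000000120.00000000.csv"
    suffix = candidates[-1].split("prog_h", 1)[1]
    print(suffix)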
348c28bacececb787ab73c9716dc515d0fabbe4b
armstrong/hatband/widgets/visualsearch.py
armstrong/hatband/widgets/visualsearch.py
from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) final_attrs["value"] = value final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1 final_attrs["object_id_name"] = self.object_id_name final_attrs["content_type_name"] = self.content_type_name return render_to_string(self.template, final_attrs)
from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) final_attrs.update({ "value": value, "is_templated": final_attrs["id"].find("__prefix__") > -1, "object_id_name": self.object_id_name, "content_type_name": self.content_type_name, }) return render_to_string(self.template, final_attrs)
Clean up this code a bit (no functional change)
Clean up this code a bit (no functional change)
Python
apache-2.0
armstrong/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband
from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) - final_attrs["value"] = value + final_attrs.update({ + "value": value, - final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1 + "is_templated": final_attrs["id"].find("__prefix__") > -1, - final_attrs["object_id_name"] = self.object_id_name + "object_id_name": self.object_id_name, - final_attrs["content_type_name"] = self.content_type_name + "content_type_name": self.content_type_name, + }) return render_to_string(self.template, final_attrs)
Clean up this code a bit (no functional change)
## Code Before: from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) final_attrs["value"] = value final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1 final_attrs["object_id_name"] = self.object_id_name final_attrs["content_type_name"] = self.content_type_name return render_to_string(self.template, final_attrs) ## Instruction: Clean up this code a bit (no functional change) ## Code After: from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) final_attrs.update({ "value": value, "is_templated": final_attrs["id"].find("__prefix__") > -1, "object_id_name": self.object_id_name, "content_type_name": self.content_type_name, }) return render_to_string(self.template, final_attrs)
from django.forms import Widget from django.template.loader import render_to_string from ..utils import static_url class GenericKeyWidget(Widget): template = "admin/hatband/widgets/generickey.html" class Media: js = (static_url("visualsearch/dependencies.js"), static_url("visualsearch/visualsearch.js"), static_url("generickey.js"), ) css = { "all": (static_url("visualsearch/visualsearch.css"), static_url("hatband/css/generickey.css"), ) } def __init__(self, object_id_name="object_id", content_type_name="content_type", *args, **kwargs): super(GenericKeyWidget, self).__init__(*args, **kwargs) self.object_id_name = object_id_name self.content_type_name = content_type_name def render(self, name, value, attrs=None): if value is None: value = '' final_attrs = self.build_attrs(attrs, name=name) - final_attrs["value"] = value + final_attrs.update({ + "value": value, - final_attrs["is_templated"] = final_attrs["id"].find("__prefix__") > -1 ? ^^^^^^^^^^^^ ^^^ + "is_templated": final_attrs["id"].find("__prefix__") > -1, ? ^^^^ ^ + - final_attrs["object_id_name"] = self.object_id_name ? ^^^^^^^^^^^^ ^^^ + "object_id_name": self.object_id_name, ? ^^^^ ^ + - final_attrs["content_type_name"] = self.content_type_name ? ^^^^^^^^^^^^ ^^^ + "content_type_name": self.content_type_name, ? ^^^^ ^ + + }) return render_to_string(self.template, final_attrs)
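Since Widget.build_attrs returns a plain dict, the refactor simply batches four item assignments into one dict.update call; the resulting mapping is identical. A dependency-free check with stand-in values (the id string here is hypothetical):

final_attrs = {"id": "id_form-__prefix__-object_id"}
final_attrs.update({
    "value": "",
    "is_templated": final_attrs["id"].find("__prefix__") > -1,
    "object_id_name": "object_id",
    "content_type_name": "content_type",
})
assert final_attrs["is_templated"] is True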
7fa490cb598aca2848ce886dfc45bb8606f07e58
backend/geonature/core/gn_profiles/models.py
backend/geonature/core/gn_profiles/models.py
from geonature.utils.env import DB from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer)
from flask import current_app from geoalchemy2 import Geometry from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable from geonature.utils.env import DB @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) @serializable @geoserializable class VmValidProfiles(DB.Model): __tablename__ = "vm_valid_profiles" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) altitude_min = DB.Column(DB.Integer) altitude_max = DB.Column(DB.Integer) first_valid_data = DB.Column(DB.DateTime) last_valid_data = DB.Column(DB.DateTime) count_valid_data = DB.Column(DB.Integer)
Add VM valid profile model
Add VM valid profile model
Python
bsd-2-clause
PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature
+ from flask import current_app + from geoalchemy2 import Geometry + + from utils_flask_sqla.serializers import serializable + from utils_flask_sqla_geo.serializers import geoserializable + from geonature.utils.env import DB - from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) + + @serializable + @geoserializable + class VmValidProfiles(DB.Model): + __tablename__ = "vm_valid_profiles" + __table_args__ = {"schema": "gn_profiles"} + cd_ref = DB.Column(DB.Integer) + valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) + altitude_min = DB.Column(DB.Integer) + altitude_max = DB.Column(DB.Integer) + first_valid_data = DB.Column(DB.DateTime) + last_valid_data = DB.Column(DB.DateTime) + count_valid_data = DB.Column(DB.Integer)
Add VM valid profile model
## Code Before: from geonature.utils.env import DB from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) ## Instruction: Add VM valid profile model ## Code After: from flask import current_app from geoalchemy2 import Geometry from utils_flask_sqla.serializers import serializable from utils_flask_sqla_geo.serializers import geoserializable from geonature.utils.env import DB @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) @serializable @geoserializable class VmValidProfiles(DB.Model): __tablename__ = "vm_valid_profiles" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) altitude_min = DB.Column(DB.Integer) altitude_max = DB.Column(DB.Integer) first_valid_data = DB.Column(DB.DateTime) last_valid_data = DB.Column(DB.DateTime) count_valid_data = DB.Column(DB.Integer)
+ from flask import current_app + from geoalchemy2 import Geometry + + from utils_flask_sqla.serializers import serializable + from utils_flask_sqla_geo.serializers import geoserializable + from geonature.utils.env import DB - from utils_flask_sqla.serializers import serializable @serializable class VmCorTaxonPhenology(DB.Model): __tablename__ = "vm_cor_taxon_phenology" __table_args__ = {"schema": "gn_profiles"} cd_ref = DB.Column(DB.Integer) period = DB.Column(DB.Integer) id_nomenclature_life_stage = DB.Column(DB.Integer) id_altitude_range = DB.Column(DB.Integer) count_valid_data = DB.Column(DB.Integer) + + + @serializable + @geoserializable + class VmValidProfiles(DB.Model): + __tablename__ = "vm_valid_profiles" + __table_args__ = {"schema": "gn_profiles"} + cd_ref = DB.Column(DB.Integer) + valid_distribution = DB.Column(Geometry("GEOMETRY", current_app.config["LOCAL_SRID"])) + altitude_min = DB.Column(DB.Integer) + altitude_max = DB.Column(DB.Integer) + first_valid_data = DB.Column(DB.DateTime) + last_valid_data = DB.Column(DB.DateTime) + count_valid_data = DB.Column(DB.Integer)
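For context, a minimal sketch of how a read-only view model like VmValidProfiles is typically queried. It uses plain SQLAlchemy (1.4+) with an in-memory SQLite stand-in, because the GeoNature DB object, the PostGIS Geometry column and current_app.config["LOCAL_SRID"] only exist inside a configured app; the table name and integer columns mirror the record, everything else is an assumption.

# Stand-in for querying a vm_valid_profiles-like view with plain SQLAlchemy.
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class VmValidProfiles(Base):
    # Mirrors the integer columns of the record; the geometry and date
    # columns are omitted so the sketch runs without PostGIS.
    __tablename__ = "vm_valid_profiles"
    cd_ref = Column(Integer, primary_key=True)  # assumed key for the demo
    altitude_min = Column(Integer)
    altitude_max = Column(Integer)
    count_valid_data = Column(Integer)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(VmValidProfiles(cd_ref=60612, altitude_min=0,
                                altitude_max=1200, count_valid_data=42))
    session.commit()
    profile = session.query(VmValidProfiles).filter_by(cd_ref=60612).one()
    print(profile.altitude_min, profile.altitude_max)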
41beca23fff6eab718550d0ce8d22769653c3109
sauce_test/test_suite.py
sauce_test/test_suite.py
import unittest import access_dvn # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful())
import unittest import access_dvn import test_dataverse import test_dataset # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), unittest.makeSuite(test_dataverse.TestDataverseFunctions), unittest.makeSuite(test_dataset.TestDatasetFunctions), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful())
Update test suite to include dataverse and dataset tests.
Update test suite to include dataverse and dataset tests.
Python
apache-2.0
ekoi/DANS-DVN-4.6.1,ekoi/DANS-DVN-4.6.1,quarian/dataverse,leeper/dataverse-1,leeper/dataverse-1,bmckinney/dataverse-canonical,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,quarian/dataverse,quarian/dataverse,leeper/dataverse-1,leeper/dataverse-1,JayanthyChengan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,JayanthyChengan/dataverse,ekoi/DANS-DVN-4.6.1,quarian/dataverse,majorseitan/dataverse,JayanthyChengan/dataverse,quarian/dataverse,majorseitan/dataverse,jacksonokuhn/dataverse,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,JayanthyChengan/dataverse,jacksonokuhn/dataverse,jacksonokuhn/dataverse,quarian/dataverse,ekoi/DANS-DVN-4.6.1,majorseitan/dataverse,jacksonokuhn/dataverse,leeper/dataverse-1,leeper/dataverse-1,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,jacksonokuhn/dataverse,jacksonokuhn/dataverse,JayanthyChengan/dataverse,bmckinney/dataverse-canonical,leeper/dataverse-1,majorseitan/dataverse,JayanthyChengan/dataverse,leeper/dataverse-1,majorseitan/dataverse,ekoi/DANS-DVN-4.6.1,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,majorseitan/dataverse,bmckinney/dataverse-canonical,jacksonokuhn/dataverse,bmckinney/dataverse-canonical,ekoi/DANS-DVN-4.6.1,quarian/dataverse
import unittest import access_dvn + import test_dataverse + import test_dataset # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), + unittest.makeSuite(test_dataverse.TestDataverseFunctions), + unittest.makeSuite(test_dataset.TestDatasetFunctions), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful())
Update test suite to include dataverse and dataset tests.
## Code Before: import unittest import access_dvn # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful()) ## Instruction: Update test suite to include dataverse and dataset tests. ## Code After: import unittest import access_dvn import test_dataverse import test_dataset # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), unittest.makeSuite(test_dataverse.TestDataverseFunctions), unittest.makeSuite(test_dataset.TestDatasetFunctions), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful())
import unittest import access_dvn + import test_dataverse + import test_dataset # This is a list of testFileName.testClass def suite(): return unittest.TestSuite((\ unittest.makeSuite(access_dvn.AccessDVN), + unittest.makeSuite(test_dataverse.TestDataverseFunctions), + unittest.makeSuite(test_dataset.TestDatasetFunctions), )) if __name__ == "__main__": result = unittest.TextTestRunner(verbosity=2).run(suite()) # sys.exit(not result.wasSuccessful())
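A self-contained version of the same suite-building pattern; ExampleTest is made up. Note that unittest.makeSuite, which the record relies on, is deprecated since Python 3.11 and removed in 3.13, so the sketch uses the equivalent TestLoader call.

# Self-contained suite pattern; ExampleTest is a placeholder test case.
import unittest

class ExampleTest(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

def suite():
    loader = unittest.TestLoader()
    # Equivalent to unittest.makeSuite(ExampleTest) on older Pythons.
    return unittest.TestSuite((loader.loadTestsFromTestCase(ExampleTest),))

if __name__ == "__main__":
    result = unittest.TextTestRunner(verbosity=2).run(suite())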
41ec266722eefb01b7e884696c7825bd5273e4ca
tests/test_diff.py
tests/test_diff.py
from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): def test_same_node(self): node1_src = '<h1>A</h1>' node1 = parse_html(node1_src) node2 = parse_html(node1_src) self.assertTrue(_is_same_node(node1.firstChild, node2.firstChild))
from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): def setUp(self): self.src1 = '<h1>text1</h1>' self.src2 = '<h1>text2</h1>' self.src3 = '<h2>text1</h2>' self.text1 = 'text1' self.text2 = 'text2' self.node1 = parse_html(self.src1).firstChild self.node2 = parse_html(self.src2).firstChild self.node3 = parse_html(self.src3).firstChild self.t_node1 = parse_html(self.text1).firstChild self.t_node2 = parse_html(self.text2).firstChild def test_same_node(self): node1 = parse_html(self.src1).firstChild node2 = parse_html(self.src1).firstChild self.assertTrue(_is_same_node(node1, node2)) def test_different_text(self): self.assertFalse(_is_same_node(self.node1, self.node2)) def test_different_tag(self): self.assertFalse(_is_same_node(self.node1, self.node3)) def test_same_text(self): node1 = parse_html(self.text1).firstChild node2 = parse_html(self.text1).firstChild self.assertTrue(_is_same_node(node1, node2)) def test_different_text_node(self): self.assertFalse(_is_same_node(self.t_node1, self.t_node2)) def test_different_tag_text(self): self.assertFalse(_is_same_node(self.node1, self.t_node1)) self.assertFalse(_is_same_node(self.node2, self.t_node2)) self.assertFalse(_is_same_node(self.node3, self.t_node1))
Add test for same node check
Add test for same node check
Python
mit
miyakogi/livemark
from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): + def setUp(self): + self.src1 = '<h1>text1</h1>' + self.src2 = '<h1>text2</h1>' + self.src3 = '<h2>text1</h2>' + self.text1 = 'text1' + self.text2 = 'text2' + self.node1 = parse_html(self.src1).firstChild + self.node2 = parse_html(self.src2).firstChild + self.node3 = parse_html(self.src3).firstChild + self.t_node1 = parse_html(self.text1).firstChild + self.t_node2 = parse_html(self.text2).firstChild + def test_same_node(self): - node1_src = '<h1>A</h1>' - node1 = parse_html(node1_src) + node1 = parse_html(self.src1).firstChild - node2 = parse_html(node1_src) + node2 = parse_html(self.src1).firstChild - self.assertTrue(_is_same_node(node1.firstChild, node2.firstChild)) + self.assertTrue(_is_same_node(node1, node2)) + def test_different_text(self): + self.assertFalse(_is_same_node(self.node1, self.node2)) + + def test_different_tag(self): + self.assertFalse(_is_same_node(self.node1, self.node3)) + + def test_same_text(self): + node1 = parse_html(self.text1).firstChild + node2 = parse_html(self.text1).firstChild + self.assertTrue(_is_same_node(node1, node2)) + + def test_different_text_node(self): + self.assertFalse(_is_same_node(self.t_node1, self.t_node2)) + + def test_different_tag_text(self): + self.assertFalse(_is_same_node(self.node1, self.t_node1)) + self.assertFalse(_is_same_node(self.node2, self.t_node2)) + self.assertFalse(_is_same_node(self.node3, self.t_node1)) +
Add test for same node check
## Code Before: from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): def test_same_node(self): node1_src = '<h1>A</h1>' node1 = parse_html(node1_src) node2 = parse_html(node1_src) self.assertTrue(_is_same_node(node1.firstChild, node2.firstChild)) ## Instruction: Add test for same node check ## Code After: from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): def setUp(self): self.src1 = '<h1>text1</h1>' self.src2 = '<h1>text2</h1>' self.src3 = '<h2>text1</h2>' self.text1 = 'text1' self.text2 = 'text2' self.node1 = parse_html(self.src1).firstChild self.node2 = parse_html(self.src2).firstChild self.node3 = parse_html(self.src3).firstChild self.t_node1 = parse_html(self.text1).firstChild self.t_node2 = parse_html(self.text2).firstChild def test_same_node(self): node1 = parse_html(self.src1).firstChild node2 = parse_html(self.src1).firstChild self.assertTrue(_is_same_node(node1, node2)) def test_different_text(self): self.assertFalse(_is_same_node(self.node1, self.node2)) def test_different_tag(self): self.assertFalse(_is_same_node(self.node1, self.node3)) def test_same_text(self): node1 = parse_html(self.text1).firstChild node2 = parse_html(self.text1).firstChild self.assertTrue(_is_same_node(node1, node2)) def test_different_text_node(self): self.assertFalse(_is_same_node(self.t_node1, self.t_node2)) def test_different_tag_text(self): self.assertFalse(_is_same_node(self.node1, self.t_node1)) self.assertFalse(_is_same_node(self.node2, self.t_node2)) self.assertFalse(_is_same_node(self.node3, self.t_node1))
from livemark.diff import _is_same_node, _next_noempty from wdom.tests.util import TestCase from wdom.parser import parse_html class TestSameNode(TestCase): + def setUp(self): + self.src1 = '<h1>text1</h1>' + self.src2 = '<h1>text2</h1>' + self.src3 = '<h2>text1</h2>' + self.text1 = 'text1' + self.text2 = 'text2' + self.node1 = parse_html(self.src1).firstChild + self.node2 = parse_html(self.src2).firstChild + self.node3 = parse_html(self.src3).firstChild + self.t_node1 = parse_html(self.text1).firstChild + self.t_node2 = parse_html(self.text2).firstChild + def test_same_node(self): - node1_src = '<h1>A</h1>' - node1 = parse_html(node1_src) ? ^^^ ^^ + node1 = parse_html(self.src1).firstChild ? ^ ^^^ + +++++++++++ - node2 = parse_html(node1_src) ? ^^^ ^^ + node2 = parse_html(self.src1).firstChild ? ^ ^^^ + +++++++++++ - self.assertTrue(_is_same_node(node1.firstChild, node2.firstChild)) ? ----------- ----------- + self.assertTrue(_is_same_node(node1, node2)) + + def test_different_text(self): + self.assertFalse(_is_same_node(self.node1, self.node2)) + + def test_different_tag(self): + self.assertFalse(_is_same_node(self.node1, self.node3)) + + def test_same_text(self): + node1 = parse_html(self.text1).firstChild + node2 = parse_html(self.text1).firstChild + self.assertTrue(_is_same_node(node1, node2)) + + def test_different_text_node(self): + self.assertFalse(_is_same_node(self.t_node1, self.t_node2)) + + def test_different_tag_text(self): + self.assertFalse(_is_same_node(self.node1, self.t_node1)) + self.assertFalse(_is_same_node(self.node2, self.t_node2)) + self.assertFalse(_is_same_node(self.node3, self.t_node1))
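A rough stand-in for the comparison these tests exercise. The real _is_same_node comes from livemark and operates on wdom nodes; the sketch substitutes xml.etree elements and a simplified same-tag-same-text rule, which is an assumption about the semantics, not livemark's actual logic.

# Simplified stand-in: two nodes are "the same" when tag and text match.
import xml.etree.ElementTree as ET

def is_same_node(a, b):
    return a.tag == b.tag and (a.text or "") == (b.text or "")

node1 = ET.fromstring("<h1>text1</h1>")
node2 = ET.fromstring("<h1>text2</h1>")
node3 = ET.fromstring("<h2>text1</h2>")

assert is_same_node(node1, ET.fromstring("<h1>text1</h1>"))  # same tag and text
assert not is_same_node(node1, node2)  # same tag, different text
assert not is_same_node(node1, node3)  # different tag, same text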
cc19d0af1c22c9677960f406ced425aa48da54c1
src/sentry/migrations/0063_remove_bad_groupedmessage_index.py
src/sentry/migrations/0063_remove_bad_groupedmessage_index.py
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] try: db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) except Exception: db.rollback_transaction() def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
Revert "Dont error if 0063 index was already cleaned up"
Revert "Dont error if 0063 index was already cleaned up" This reverts commit b3a51fa482fc949de75d962ddd9fe3464fa70e58.
Python
bsd-3-clause
felixbuenemann/sentry,JackDanger/sentry,zenefits/sentry,korealerts1/sentry,fuziontech/sentry,daevaorn/sentry,argonemyth/sentry,beeftornado/sentry,vperron/sentry,mvaled/sentry,rdio/sentry,gg7/sentry,hongliang5623/sentry,felixbuenemann/sentry,jokey2k/sentry,pauloschilling/sentry,beni55/sentry,rdio/sentry,BayanGroup/sentry,ngonzalvez/sentry,gencer/sentry,mitsuhiko/sentry,Natim/sentry,gg7/sentry,boneyao/sentry,argonemyth/sentry,NickPresta/sentry,llonchj/sentry,SilentCircle/sentry,looker/sentry,Kryz/sentry,ngonzalvez/sentry,daevaorn/sentry,jokey2k/sentry,alexm92/sentry,BuildingLink/sentry,JamesMura/sentry,pauloschilling/sentry,drcapulet/sentry,beni55/sentry,camilonova/sentry,looker/sentry,vperron/sentry,JamesMura/sentry,mvaled/sentry,songyi199111/sentry,fotinakis/sentry,1tush/sentry,gencer/sentry,jean/sentry,wujuguang/sentry,ifduyue/sentry,JTCunning/sentry,fuziontech/sentry,vperron/sentry,mvaled/sentry,fuziontech/sentry,beeftornado/sentry,Kryz/sentry,camilonova/sentry,ewdurbin/sentry,kevinastone/sentry,llonchj/sentry,gencer/sentry,nicholasserra/sentry,jean/sentry,TedaLIEz/sentry,drcapulet/sentry,mvaled/sentry,looker/sentry,imankulov/sentry,gg7/sentry,NickPresta/sentry,songyi199111/sentry,mvaled/sentry,SilentCircle/sentry,JamesMura/sentry,BuildingLink/sentry,kevinlondon/sentry,Natim/sentry,hongliang5623/sentry,wujuguang/sentry,felixbuenemann/sentry,kevinastone/sentry,gencer/sentry,daevaorn/sentry,JamesMura/sentry,korealerts1/sentry,korealerts1/sentry,alexm92/sentry,BuildingLink/sentry,ifduyue/sentry,looker/sentry,beeftornado/sentry,JackDanger/sentry,BayanGroup/sentry,gencer/sentry,fotinakis/sentry,songyi199111/sentry,beni55/sentry,nicholasserra/sentry,Natim/sentry,zenefits/sentry,hongliang5623/sentry,1tush/sentry,argonemyth/sentry,llonchj/sentry,alexm92/sentry,zenefits/sentry,JackDanger/sentry,ifduyue/sentry,1tush/sentry,boneyao/sentry,JamesMura/sentry,SilentCircle/sentry,pauloschilling/sentry,ewdurbin/sentry,zenefits/sentry,mvaled/sentry,imankulov/sentry,mitsuhiko/sentry,Kryz/sentry,looker/sentry,kevinastone/sentry,kevinlondon/sentry,zenefits/sentry,ifduyue/sentry,NickPresta/sentry,wong2/sentry,ngonzalvez/sentry,rdio/sentry,fotinakis/sentry,jokey2k/sentry,SilentCircle/sentry,TedaLIEz/sentry,fotinakis/sentry,nicholasserra/sentry,daevaorn/sentry,TedaLIEz/sentry,camilonova/sentry,wong2/sentry,BuildingLink/sentry,jean/sentry,JTCunning/sentry,imankulov/sentry,jean/sentry,ifduyue/sentry,JTCunning/sentry,NickPresta/sentry,kevinlondon/sentry,drcapulet/sentry,rdio/sentry,wujuguang/sentry,jean/sentry,BayanGroup/sentry,boneyao/sentry,ewdurbin/sentry,wong2/sentry,BuildingLink/sentry
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] - try: - db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) + db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) - except Exception: - db.rollback_transaction() - def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
Revert "Dont error if 0063 index was already cleaned up"
## Code Before: import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] try: db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) except Exception: db.rollback_transaction() def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) ## Instruction: Revert "Dont error if 0063 index was already cleaned up" ## Code After: import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] - try: - db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) ? ---- + db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum']) - except Exception: - db.rollback_transaction() - def backwards(self, orm): # Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum'] db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
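The revert matters because the swallowed exception could silently leave the unique constraint in place. A small sqlite3 sketch of what removing a unique index over (logger, view, checksum) actually changes; the table and index names are illustrative, not Sentry's real DDL.

# Demonstrates the effect of dropping a unique index, using sqlite3.
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE grouped (logger TEXT, view TEXT, checksum TEXT)")
conn.execute("CREATE UNIQUE INDEX uq ON grouped (logger, view, checksum)")
conn.execute("INSERT INTO grouped VALUES ('root', 'home', 'abc')")
try:
    conn.execute("INSERT INTO grouped VALUES ('root', 'home', 'abc')")
except sqlite3.IntegrityError:
    print("duplicate rejected while the unique index exists")

conn.execute("DROP INDEX uq")  # what the forwards migration does
conn.execute("INSERT INTO grouped VALUES ('root', 'home', 'abc')")
print("duplicate accepted after the index is removed")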
cd006f8d3885005e867255e63819fc8a5c7430bf
redactor/TextEditor.py
redactor/TextEditor.py
from tkinter import * class TextEditor(): def __init__(self): self.root = Tk() self.root.wm_title("BrickText") self.text_panel = Text(self.root) self.text_panel.pack(fill=BOTH, expand=YES) def start(self): self.root.mainloop() def get_root(self): return self.root def get_text_panel(self): return self.text_panel
from tkinter import *


class TextEditor():

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("BrickText")
        self.text_panel = Text(self.root)
        self.text_panel.pack(fill=BOTH, expand=YES)

    def start(self):
        self.root.mainloop()

    def get_root(self):
        return self.root

    def get_text_widget(self):
        return self.text_panel

    def get_text_panel(self):
        return self.text_panel
Add getter for text widget
Add getter for text widget
Python
mit
BrickText/BrickText
from tkinter import *


class TextEditor():

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("BrickText")
        self.text_panel = Text(self.root)
        self.text_panel.pack(fill=BOTH, expand=YES)

    def start(self):
        self.root.mainloop()

    def get_root(self):
        return self.root
+
+    def get_text_widget(self):
+        return self.text_panel

    def get_text_panel(self):
        return self.text_panel
Add getter for text widget
## Code Before:
from tkinter import *


class TextEditor():

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("BrickText")
        self.text_panel = Text(self.root)
        self.text_panel.pack(fill=BOTH, expand=YES)

    def start(self):
        self.root.mainloop()

    def get_root(self):
        return self.root

    def get_text_panel(self):
        return self.text_panel

## Instruction:
Add getter for text widget

## Code After:
from tkinter import *


class TextEditor():

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("BrickText")
        self.text_panel = Text(self.root)
        self.text_panel.pack(fill=BOTH, expand=YES)

    def start(self):
        self.root.mainloop()

    def get_root(self):
        return self.root

    def get_text_widget(self):
        return self.text_panel

    def get_text_panel(self):
        return self.text_panel
from tkinter import *


class TextEditor():

    def __init__(self):
        self.root = Tk()
        self.root.wm_title("BrickText")
        self.text_panel = Text(self.root)
        self.text_panel.pack(fill=BOTH, expand=YES)

    def start(self):
        self.root.mainloop()

    def get_root(self):
        return self.root
+
+    def get_text_widget(self):
+        return self.text_panel

    def get_text_panel(self):
        return self.text_panel
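A short usage sketch for the new accessor. The getter has been corrected above to return self.text_panel, since no self.editor attribute is ever assigned in the class. The sketch assumes the class is importable as redactor.TextEditor and needs a windowing environment to run.

# Usage sketch; requires a display.
from redactor.TextEditor import TextEditor

editor = TextEditor()
panel = editor.get_text_widget()          # the new accessor
panel.insert("1.0", "Hello, BrickText!")  # standard tkinter Text API
editor.start()                            # enters the Tk main loop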
1fce6a621ad4fe149988147478e15c7415295a7b
changes/api/serializer/models/source.py
changes/api/serializer/models/source.py
from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, }
from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, 'tails_data': dict(instance.data), }
Add data to Source serialization
Add data to Source serialization
Python
apache-2.0
dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,wfxiang08/changes
from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, + 'tails_data': dict(instance.data), }
Add data to Source serialization
## Code Before: from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, } ## Instruction: Add data to Source serialization ## Code After: from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, 'tails_data': dict(instance.data), }
from changes.api.serializer import Serializer, register from changes.models import Source @register(Source) class SourceSerializer(Serializer): def serialize(self, instance, attrs): if instance.patch_id: patch = { 'id': instance.patch_id.hex, } else: patch = None return { 'id': instance.id.hex, 'patch': patch, 'revision': instance.revision, 'dateCreated': instance.date_created, + 'tails_data': dict(instance.data), }
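A stand-in showing the shape the updated serializer produces. FakeSource and the inline serialize function mirror the record's fields, but none of this is the real changes Serializer machinery, and the data contents are invented.

# Stand-in objects mirroring the serialized shape of a Source.
import uuid
from datetime import datetime

class FakeSource:
    def __init__(self):
        self.id = uuid.uuid4()
        self.patch_id = None
        self.revision = "abc123"
        self.date_created = datetime.utcnow()
        self.data = {"tails": {"build": 1}}  # invented payload

def serialize(instance):
    patch = {"id": instance.patch_id.hex} if instance.patch_id else None
    return {
        "id": instance.id.hex,
        "patch": patch,
        "revision": instance.revision,
        "dateCreated": instance.date_created,
        "tails_data": dict(instance.data),  # the newly added key
    }

print(serialize(FakeSource()))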
95eb2c11a4f35e594eda25c10bdf85a25b2f4392
src/ConfigLoader.py
src/ConfigLoader.py
import json import sys def load_config_file(out=sys.stdout): default_filepath = "../resources/config/default-config.json" user_filepath = "../resources/config/user-config.json" try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file()
import json import sys def load_config_file(out=sys.stdout): if sys.argv[0].endswith('nosetests'): default_filepath = "./resources/config/default-config.json" user_filepath = "./resources/config/user-config.json" else: default_filepath = "../resources/config/default-config.json" user_filepath = "../resources/config/user-config.json" try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file()
Fix nosetests for config file loading
Fix nosetests for config file loading
Python
bsd-3-clause
sky-uk/bslint
import json import sys def load_config_file(out=sys.stdout): + if sys.argv[0].endswith('nosetests'): + default_filepath = "./resources/config/default-config.json" + user_filepath = "./resources/config/user-config.json" + else: - default_filepath = "../resources/config/default-config.json" + default_filepath = "../resources/config/default-config.json" - user_filepath = "../resources/config/user-config.json" + user_filepath = "../resources/config/user-config.json" try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file()
Fix nosetests for config file loading
## Code Before: import json import sys def load_config_file(out=sys.stdout): default_filepath = "../resources/config/default-config.json" user_filepath = "../resources/config/user-config.json" try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file() ## Instruction: Fix nosetests for config file loading ## Code After: import json import sys def load_config_file(out=sys.stdout): if sys.argv[0].endswith('nosetests'): default_filepath = "./resources/config/default-config.json" user_filepath = "./resources/config/user-config.json" else: default_filepath = "../resources/config/default-config.json" user_filepath = "../resources/config/user-config.json" try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file()
import json import sys def load_config_file(out=sys.stdout): + if sys.argv[0].endswith('nosetests'): + default_filepath = "./resources/config/default-config.json" + user_filepath = "./resources/config/user-config.json" + else: - default_filepath = "../resources/config/default-config.json" + default_filepath = "../resources/config/default-config.json" ? ++++ - user_filepath = "../resources/config/user-config.json" + user_filepath = "../resources/config/user-config.json" ? ++++ try: default_json = read_json(default_filepath) user_json = read_json(user_filepath) for property in user_json: default_json[property] = user_json[property] except FileNotFoundError as e: out.write("Cannot find file: " + e.filename) else: out.write("Read styling config JSON correctly.") return default_json def read_json(filepath): config_string = '' with open(filepath) as f: for line in f: line = line.lstrip() if not line.startswith("//"): config_string += line config_json = json.loads(config_string) return config_json if __name__ == "__main__": load_config_file()
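The merge-and-strip-comments behaviour in isolation: user values override defaults, and lines whose stripped form begins with // are dropped before parsing, following the record's line-prefix rule. The two JSON strings are invented.

# The config merge logic, reduced to inline strings.
import json

def parse_commented_json(text):
    lines = [l for l in text.splitlines() if not l.lstrip().startswith("//")]
    return json.loads("\n".join(lines))

default_cfg = parse_commented_json("""
// default styling rules
{"indent": 4, "max_line_length": 100}
""")
user_cfg = parse_commented_json("""
// user overrides
{"max_line_length": 80}
""")

for key in user_cfg:
    default_cfg[key] = user_cfg[key]  # user settings win, as in the record
print(default_cfg)  # {'indent': 4, 'max_line_length': 80}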
346ddf5e26351fe1fadbed1bf06482565080a728
stack.py
stack.py
'''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): pass def peek(self): return self.top.value
'''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): try: pop_item = self.top self.top = pop_item.next_item return pop_item.value except AttributeError: raise ValueError('No items in stack') def peek(self): return self.top.value
Add pop method on Stack class
Add pop method on Stack class
Python
mit
jwarren116/data-structures-deux
'''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): - pass + try: + pop_item = self.top + self.top = pop_item.next_item + return pop_item.value + except AttributeError: + raise ValueError('No items in stack') def peek(self): return self.top.value
Add pop method on Stack class
## Code Before: '''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): pass def peek(self): return self.top.value ## Instruction: Add pop method on Stack class ## Code After: '''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): try: pop_item = self.top self.top = pop_item.next_item return pop_item.value except AttributeError: raise ValueError('No items in stack') def peek(self): return self.top.value
'''Implementation of a simple stack data structure. The stack has push, pop, and peek methods. Items in the stack have a value, and next_item attribute. The stack has a top attribute. ''' class Item(object): def __init__(self, value, next_item=None): self.value = value self.next_item = next_item def __str__(self): return self.value class Stack(object): def __init__(self, top=None): self.top = top def push(self, value): item = Item(value) item.next_item = self.top self.top = item def pop(self): - pass + try: + pop_item = self.top + self.top = pop_item.next_item + return pop_item.value + except AttributeError: + raise ValueError('No items in stack') def peek(self): return self.top.value
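Usage of the finished stack, assuming the module is importable as stack; the empty-stack ValueError comes from the AttributeError translation inside pop.

# Exercising push/peek/pop, assuming stack.py is on the import path.
from stack import Stack

s = Stack()
s.push("a")
s.push("b")
assert s.peek() == "b"   # peek leaves the item in place
assert s.pop() == "b"    # pop returns and unlinks the top item
assert s.pop() == "a"
try:
    s.pop()              # popping an empty stack
except ValueError as exc:
    print(exc)           # "No items in stack"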
3d2b4536803df4a202d8c1c9b5d0e689f1053378
tests/config.py
tests/config.py
import sys sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API import unittest testing_community = 'fiveheads.ideascale.com' testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a' class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
import os import sys import unittest sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API testing_community = 'fiveheads.ideascale.com' testing_token = os.environ.get('TOKEN', '') class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
Read token from environment variable
Read token from environment variable
Python
mit
joausaga/ideascaly
+ import os import sys + import unittest sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API - import unittest - testing_community = 'fiveheads.ideascale.com' - testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a' + testing_token = os.environ.get('TOKEN', '') class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
Read token from environment variable
## Code Before: import sys sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API import unittest testing_community = 'fiveheads.ideascale.com' testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a' class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth ## Instruction: Read token from environment variable ## Code After: import os import sys import unittest sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API testing_community = 'fiveheads.ideascale.com' testing_token = os.environ.get('TOKEN', '') class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
+ import os import sys + import unittest sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API - import unittest - testing_community = 'fiveheads.ideascale.com' - testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a' + testing_token = os.environ.get('TOKEN', '') class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
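One common follow-up to reading the token from the environment is skipping live-API tests when it is absent; the skip decorator below is an addition for illustration, not part of the record. A run would look like TOKEN=your-token nosetests.

# Skipping API tests cleanly when TOKEN is not exported.
import os
import unittest

@unittest.skipUnless(os.environ.get("TOKEN"), "TOKEN not set in environment")
class LiveApiTestCase(unittest.TestCase):
    def test_token_present(self):
        self.assertTrue(os.environ["TOKEN"])

if __name__ == "__main__":
    unittest.main()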
850803d02868e20bc637f777ee201ac778c63606
lms/djangoapps/edraak_misc/utils.py
lms/djangoapps/edraak_misc/utils.py
from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): return (course.has_ended() and settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE') or has_access(user, 'staff', course.id))
from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): if not settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE'): return False return course.has_ended() or has_access(user, 'staff', course.id)
Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False
Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False
Python
agpl-3.0
Edraak/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,Edraak/circleci-edx-platform,Edraak/edx-platform
from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): - return (course.has_ended() - and settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE') + if not settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE'): - or has_access(user, 'staff', course.id)) + return False + return course.has_ended() or has_access(user, 'staff', course.id) +
Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False
## Code Before: from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): return (course.has_ended() and settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE') or has_access(user, 'staff', course.id)) ## Instruction: Disable certificate for all if ENABLE_ISSUE_CERTIFICATE == False ## Code After: from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): if not settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE'): return False return course.has_ended() or has_access(user, 'staff', course.id)
from courseware.access import has_access from django.conf import settings def is_certificate_allowed(user, course): - return (course.has_ended() - and settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE') ? -------- ^ + if not settings.FEATURES.get('ENABLE_ISSUE_CERTIFICATE'): ? ++ ^^ + - or has_access(user, 'staff', course.id)) + return False + + return course.has_ended() or has_access(user, 'staff', course.id)
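The refactored gate as a truth table, with stubs for the course, the access check and the feature flags; everything here is a stand-in for the real edX objects, chosen only to show that the flag now short-circuits everyone, staff included.

# Stubs reproducing the gate: feature flag first, then ended-or-staff.
FEATURES = {"ENABLE_ISSUE_CERTIFICATE": True}

class Course:
    def __init__(self, ended):
        self._ended = ended
    def has_ended(self):
        return self._ended

def has_access(user, action, course_id):
    return user == "staff"  # toy access check

def is_certificate_allowed(user, course):
    if not FEATURES.get("ENABLE_ISSUE_CERTIFICATE"):
        return False
    return course.has_ended() or has_access(user, "staff", None)

for user in ("student", "staff"):
    for ended in (False, True):
        print(user, ended, is_certificate_allowed(user, Course(ended)))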
f01c6d22d30e175d120e5ffe10bef93378375ea7
example/myshop/models/__init__.py
example/myshop/models/__init__.py
from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order
from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order __all__ = ['ShippingAddress', 'BillingAddress', 'Cart', 'CartItem', 'Customer', 'Order', 'OrderItem', 'Commodity', 'SmartCard', 'SmartPhoneModel', 'SmartPhone', 'Delivery', 'DeliveryItem']
Use __all__ for restricted exports
Use __all__ for restricted exports
Python
bsd-3-clause
jrief/django-shop,khchine5/django-shop,nimbis/django-shop,jrief/django-shop,nimbis/django-shop,khchine5/django-shop,divio/django-shop,nimbis/django-shop,jrief/django-shop,khchine5/django-shop,divio/django-shop,awesto/django-shop,jrief/django-shop,awesto/django-shop,awesto/django-shop,divio/django-shop,nimbis/django-shop,khchine5/django-shop
from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order + __all__ = ['ShippingAddress', 'BillingAddress', 'Cart', 'CartItem', 'Customer', 'Order', 'OrderItem', + 'Commodity', 'SmartCard', 'SmartPhoneModel', 'SmartPhone', 'Delivery', 'DeliveryItem'] +
Use __all__ for restricted exports
## Code Before: from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order ## Instruction: Use __all__ for restricted exports ## Code After: from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order __all__ = ['ShippingAddress', 'BillingAddress', 'Cart', 'CartItem', 'Customer', 'Order', 'OrderItem', 'Commodity', 'SmartCard', 'SmartPhoneModel', 'SmartPhone', 'Delivery', 'DeliveryItem']
from __future__ import unicode_literals from django.conf import settings # import default models from djangoSHOP to materialize them from shop.models.defaults.address import ShippingAddress, BillingAddress from shop.models.defaults.cart import Cart from shop.models.defaults.cart_item import CartItem from shop.models.defaults.customer import Customer # models defined by the myshop instance itself if settings.SHOP_TUTORIAL == 'commodity' or settings.SHOP_TUTORIAL == 'i18n_commodity': from shop.models.defaults.order_item import OrderItem from shop.models.defaults.commodity import Commodity elif settings.SHOP_TUTORIAL == 'smartcard': from shop.models.defaults.order_item import OrderItem from .smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'i18n_smartcard': from shop.models.defaults.order_item import OrderItem from .i18n_smartcard import SmartCard elif settings.SHOP_TUTORIAL == 'polymorphic': from .polymorphic.order import OrderItem from .polymorphic.smartcard import SmartCard from .polymorphic.smartphone import SmartPhoneModel, SmartPhone from shop.models.defaults.delivery import Delivery, DeliveryItem from shop.models.defaults.order import Order + + __all__ = ['ShippingAddress', 'BillingAddress', 'Cart', 'CartItem', 'Customer', 'Order', 'OrderItem', + 'Commodity', 'SmartCard', 'SmartPhoneModel', 'SmartPhone', 'Delivery', 'DeliveryItem']
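What __all__ changes for a star-import, shown with a synthetic module. One caveat worth keeping in mind with the record itself: each tutorial branch only imports some of the listed product models, and star-importing a name that __all__ lists but the module never bound would raise. The sketch below only demonstrates the filtering.

# __all__ restricts star-imports to the listed names.
import sys
import types

mod = types.ModuleType("demo_models")
mod.Cart = type("Cart", (), {})
mod.Order = type("Order", (), {})
mod.internal_helper = object()
mod.__all__ = ["Cart", "Order"]       # internal_helper is not exported
sys.modules["demo_models"] = mod

namespace = {}
exec("from demo_models import *", namespace)
assert "Cart" in namespace and "Order" in namespace
assert "internal_helper" not in namespace
print(sorted(k for k in namespace if not k.startswith("__")))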
c5d22fd143f952ce5e0c86b9e8bce4a06fe47063
bigsi/storage/__init__.py
bigsi/storage/__init__.py
from bigsi.storage.berkeleydb import BerkeleyDBStorage from bigsi.storage.redis import RedisStorage from bigsi.storage.rocksdb import RocksDBStorage def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"])
from bigsi.storage.redis import RedisStorage try: from bigsi.storage.berkeleydb import BerkeleyDBStorage except ModuleNotFoundError: pass try: from bigsi.storage.rocksdb import RocksDBStorage except ModuleNotFoundError: pass def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"])
Allow import without optional requirements
Allow import without optional requirements
Python
mit
Phelimb/cbg,Phelimb/cbg,Phelimb/cbg,Phelimb/cbg
- from bigsi.storage.berkeleydb import BerkeleyDBStorage from bigsi.storage.redis import RedisStorage + + try: + from bigsi.storage.berkeleydb import BerkeleyDBStorage + except ModuleNotFoundError: + pass + try: - from bigsi.storage.rocksdb import RocksDBStorage + from bigsi.storage.rocksdb import RocksDBStorage + except ModuleNotFoundError: + pass def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"])
Allow import without optional requirements
## Code Before: from bigsi.storage.berkeleydb import BerkeleyDBStorage from bigsi.storage.redis import RedisStorage from bigsi.storage.rocksdb import RocksDBStorage def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"]) ## Instruction: Allow import without optional requirements ## Code After: from bigsi.storage.redis import RedisStorage try: from bigsi.storage.berkeleydb import BerkeleyDBStorage except ModuleNotFoundError: pass try: from bigsi.storage.rocksdb import RocksDBStorage except ModuleNotFoundError: pass def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"])
- from bigsi.storage.berkeleydb import BerkeleyDBStorage from bigsi.storage.redis import RedisStorage + + try: + from bigsi.storage.berkeleydb import BerkeleyDBStorage + except ModuleNotFoundError: + pass + try: - from bigsi.storage.rocksdb import RocksDBStorage + from bigsi.storage.rocksdb import RocksDBStorage ? ++++ + except ModuleNotFoundError: + pass def get_storage(config): return { "rocksdb": RocksDBStorage, "berkeleydb": BerkeleyDBStorage, "redis": RedisStorage, }[config["storage-engine"]](config["storage-config"])
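A defensive variant of the optional-import pattern. As committed, get_storage still names BerkeleyDBStorage and RocksDBStorage unconditionally, so calling it without those packages installed would raise NameError when the dict literal is evaluated; a registry populated only by successful imports, sketched below with made-up backend names, avoids that.

# Optional backends registered only when their import succeeds.
_BACKENDS = {}

try:
    from fictional_rocksdb_binding import RocksDBStorage  # assumed package
    _BACKENDS["rocksdb"] = RocksDBStorage
except ModuleNotFoundError:
    pass

class RedisStorage:  # stand-in for the always-available backend
    def __init__(self, config):
        self.config = config

_BACKENDS["redis"] = RedisStorage

def get_storage(config):
    try:
        backend = _BACKENDS[config["storage-engine"]]
    except KeyError:
        raise ValueError("storage engine %r unavailable; installed: %s"
                         % (config["storage-engine"], sorted(_BACKENDS)))
    return backend(config["storage-config"])

print(get_storage({"storage-engine": "redis", "storage-config": {}}))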
7872a2327f9dea7d4c1f5a3054b6be6bba25fdd4
scripts/migration/migrate_deleted_wikis.py
scripts/migration/migrate_deleted_wikis.py
import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] with TokuTransaction(): node.wiki_pages_versions = updated_versions node.save() if dry_run: raise RuntimeError('Dry run, transaction rolled back.') def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) if __name__ == "__main__": main()
import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] node.wiki_pages_versions = updated_versions node.save() def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) if dry_run: raise RuntimeError('Dry run, transaction rolled back.') if __name__ == "__main__": main()
Remove TokuTransaction in migrate function
Remove TokuTransaction in migrate function
Python
apache-2.0
hmoco/osf.io,samchrisinger/osf.io,hmoco/osf.io,icereval/osf.io,caneruguz/osf.io,cwisecarver/osf.io,chrisseto/osf.io,erinspace/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,crcresearch/osf.io,laurenrevere/osf.io,leb2dg/osf.io,crcresearch/osf.io,baylee-d/osf.io,leb2dg/osf.io,saradbowman/osf.io,sloria/osf.io,felliott/osf.io,mluke93/osf.io,adlius/osf.io,SSJohns/osf.io,mluke93/osf.io,binoculars/osf.io,mluo613/osf.io,felliott/osf.io,amyshi188/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,mluo613/osf.io,erinspace/osf.io,adlius/osf.io,mluke93/osf.io,acshi/osf.io,abought/osf.io,wearpants/osf.io,laurenrevere/osf.io,wearpants/osf.io,cslzchen/osf.io,mattclark/osf.io,rdhyee/osf.io,binoculars/osf.io,hmoco/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,hmoco/osf.io,emetsger/osf.io,saradbowman/osf.io,mfraezz/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,rdhyee/osf.io,abought/osf.io,abought/osf.io,rdhyee/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,samchrisinger/osf.io,chrisseto/osf.io,caseyrollins/osf.io,felliott/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,Nesiehr/osf.io,amyshi188/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,mluo613/osf.io,rdhyee/osf.io,felliott/osf.io,abought/osf.io,samchrisinger/osf.io,alexschiller/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,mluo613/osf.io,DanielSBrown/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,acshi/osf.io,TomBaxter/osf.io,emetsger/osf.io,adlius/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,emetsger/osf.io,monikagrabowska/osf.io,acshi/osf.io,aaxelb/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,chennan47/osf.io,sloria/osf.io,icereval/osf.io,mattclark/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,binoculars/osf.io,wearpants/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,chrisseto/osf.io,cwisecarver/osf.io,aaxelb/osf.io,erinspace/osf.io,cslzchen/osf.io,aaxelb/osf.io,caneruguz/osf.io,pattisdr/osf.io,mluo613/osf.io,cwisecarver/osf.io,alexschiller/osf.io,chennan47/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,mattclark/osf.io,mfraezz/osf.io,Nesiehr/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,cslzchen/osf.io,pattisdr/osf.io,SSJohns/osf.io,icereval/osf.io,caneruguz/osf.io,alexschiller/osf.io,chennan47/osf.io,acshi/osf.io,caseyrollins/osf.io,mluke93/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,acshi/osf.io,caneruguz/osf.io,mfraezz/osf.io,cwisecarver/osf.io
import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] - with TokuTransaction(): - node.wiki_pages_versions = updated_versions + node.wiki_pages_versions = updated_versions - node.save() + node.save() - if dry_run: - raise RuntimeError('Dry run, transaction rolled back.') def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) + if dry_run: + raise RuntimeError('Dry run, transaction rolled back.') if __name__ == "__main__": main()
Remove TokuTransaction in migrate function
## Code Before: import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] with TokuTransaction(): node.wiki_pages_versions = updated_versions node.save() if dry_run: raise RuntimeError('Dry run, transaction rolled back.') def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) if __name__ == "__main__": main() ## Instruction: Remove TokuTransaction in migrate function ## Code After: import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] node.wiki_pages_versions = updated_versions node.save() def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) if dry_run: raise RuntimeError('Dry run, transaction rolled back.') if __name__ == "__main__": main()
import logging import sys from modularodm import Q from framework.transactions.context import TokuTransaction from website.app import init_app from website.models import NodeLog from scripts import utils as script_utils logger = logging.getLogger(__name__) def get_targets(): return NodeLog.find(Q('action', 'eq', NodeLog.WIKI_DELETED)) def migrate(targets, dry_run=True): # iterate over targets for log in targets: node = log.node versions = node.wiki_pages_versions current = node.wiki_pages_current updated_versions = {} for wiki in versions: if wiki in current: updated_versions[wiki] = versions[wiki] - with TokuTransaction(): - node.wiki_pages_versions = updated_versions ? ---- + node.wiki_pages_versions = updated_versions - node.save() ? ---- + node.save() - if dry_run: - raise RuntimeError('Dry run, transaction rolled back.') def main(): dry_run = False if '--dry' in sys.argv: dry_run = True if not dry_run: script_utils.add_file_logger(logger, __file__) init_app(set_backends=True, routes=False) with TokuTransaction(): migrate(targets=get_targets(), dry_run=dry_run) + if dry_run: + raise RuntimeError('Dry run, transaction rolled back.') if __name__ == "__main__": main()
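The change above moves the TokuTransaction out of the per-node loop in migrate() and wraps the whole migration in a single transaction inside main(), where a dry run raises only after all the work is done so every change rolls back at once. A minimal, framework-free sketch of that dry-run pattern, using sqlite3 in place of TokuTransaction (the table and function names are illustrative, not from the commit):

import sqlite3

def migrate_rows(conn):
    # Mutate freely; nothing is durable until the transaction commits.
    conn.execute("UPDATE wiki SET versions = versions + 1")

def main(dry_run=True):
    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE wiki (versions INTEGER)")
    conn.execute("INSERT INTO wiki VALUES (0)")
    conn.commit()
    try:
        with conn:  # one transaction around the entire migration
            migrate_rows(conn)
            if dry_run:
                # Raising inside the block rolls every change back at once.
                raise RuntimeError("Dry run, transaction rolled back.")
    except RuntimeError as exc:
        print(exc)
    print(conn.execute("SELECT versions FROM wiki").fetchone())  # (0,) on a dry run

main()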
de1c2842d7f07025f23e9b12efc7dd52e4d0efbf
device_notifications/tests/model_tests.py
device_notifications/tests/model_tests.py
from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass @patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice) class AbstractBaseDeviceTests(TestCase): @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass class AbstractBaseDeviceTests(TestCase): def setUp(self): self.get_device_model_patcher = patch.object( settings, 'get_device_model', return_value=ConcreteTestDevice) self.get_device_model_patcher.start() super(AbstractBaseDeviceTests, self).setUp() def tearDown(self): super(AbstractBaseDeviceTests, self).tearDown() self.get_device_model_patcher.stop() @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method.
Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method.
Python
bsd-3-clause
roverdotcom/django-device-notifications
from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass - @patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice) class AbstractBaseDeviceTests(TestCase): + def setUp(self): + self.get_device_model_patcher = patch.object( + settings, + 'get_device_model', + return_value=ConcreteTestDevice) + self.get_device_model_patcher.start() + super(AbstractBaseDeviceTests, self).setUp() + + def tearDown(self): + super(AbstractBaseDeviceTests, self).tearDown() + self.get_device_model_patcher.stop() + @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method.
## Code Before: from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass @patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice) class AbstractBaseDeviceTests(TestCase): @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi') ## Instruction: Patch get_device_model in the setUp and tearDown methods so that we don't send the mock object to each test method. ## Code After: from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass class AbstractBaseDeviceTests(TestCase): def setUp(self): self.get_device_model_patcher = patch.object( settings, 'get_device_model', return_value=ConcreteTestDevice) self.get_device_model_patcher.start() super(AbstractBaseDeviceTests, self).setUp() def tearDown(self): super(AbstractBaseDeviceTests, self).tearDown() self.get_device_model_patcher.stop() @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
from mock import patch from django.test.testcases import TestCase from device_notifications import settings from device_notifications.models import AbstractBaseDevice from device_notifications.models import InvalidDeviceType class ConcreteTestDevice(AbstractBaseDevice): pass - @patch.object(settings, 'get_device_model', return_value=ConcreteTestDevice) class AbstractBaseDeviceTests(TestCase): + def setUp(self): + self.get_device_model_patcher = patch.object( + settings, + 'get_device_model', + return_value=ConcreteTestDevice) + self.get_device_model_patcher.start() + super(AbstractBaseDeviceTests, self).setUp() + + def tearDown(self): + super(AbstractBaseDeviceTests, self).tearDown() + self.get_device_model_patcher.stop() + @patch('device_notifications.models.gcm_send_message_task') def test_send_message(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='android') message = 'Hello World' device.send_message(message) gcm_send_message_task.apply_async.assert_called_with( args=[device.pk, message]) @patch('device_notifications.models.gcm_send_message_task') def test_send_message_bad_device_type(self, gcm_send_message_task): device = ConcreteTestDevice( pk=1, device_type='windows_phone') self.assertRaises(InvalidDeviceType, device.send_message, 'Hi')
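The message above spells out the motivation: decorating the test class with @patch.object makes mock inject the patched object as an extra argument into every test method, while starting a patcher in setUp and stopping it in tearDown keeps the method signatures clean. A self-contained sketch of the setUp/tearDown pattern (the Settings stand-in is illustrative; the real code patches a Django settings module):

import unittest
from unittest import mock

class Settings:  # stand-in for the module being patched
    @staticmethod
    def get_device_model():
        return "real"

class PatcherTests(unittest.TestCase):
    def setUp(self):
        self.patcher = mock.patch.object(
            Settings, "get_device_model", return_value="fake")
        self.patcher.start()

    def tearDown(self):
        self.patcher.stop()  # self.addCleanup(self.patcher.stop) also works

    def test_patched(self):  # note: no injected mock argument in the signature
        self.assertEqual(Settings.get_device_model(), "fake")

if __name__ == "__main__":
    unittest.main()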
fa0513fe303a0111291d459d3bf275229b1eb052
main.py
main.py
import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') parser.add_argument('-l', '--listen', nargs='+', default='happy birthday', help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main()
import sys import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') parser.add_argument('-l', '--listen', nargs='+', default=['happy birthday'], help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main()
Change default type for listen argument
Change default type for listen argument str->list
Python
mit
kshvmdn/TwitterBirthdayResponder,kshvmdn/twitter-birthday-responder,kshvmdn/twitter-autoreply
+ import sys import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') - parser.add_argument('-l', '--listen', nargs='+', default='happy birthday', + parser.add_argument('-l', '--listen', nargs='+', default=['happy birthday'], help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main()
Change default type for listen argument
## Code Before: import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') parser.add_argument('-l', '--listen', nargs='+', default='happy birthday', help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main() ## Instruction: Change default type for listen argument ## Code After: import sys import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') parser.add_argument('-l', '--listen', nargs='+', default=['happy birthday'], help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main()
+ import sys import datetime import argparse from auth import twitter_auth as auth from twitterbot import TwitterBot from apscheduler.schedulers.blocking import BlockingScheduler parser = argparse.ArgumentParser(description='Respond to Twitter mentions.') - parser.add_argument('-l', '--listen', nargs='+', default='happy birthday', + parser.add_argument('-l', '--listen', nargs='+', default=['happy birthday'], ? + + help='phrase(s) to reply to (separate by space)') parser.add_argument('-r', '--reply', default='HANDLE thanks!', help='reply text (use HANDLE for user handle)') args = parser.parse_args() bot = TwitterBot(auth, args.listen, args.reply.replace('HANDLE', '@{}')) def reply(): print(' Running...') bot.reply_to_mention() print(' Finished running at {}'.format(datetime.datetime.now())) def main(): print('Starting bot...') # run once every minute scheduler = BlockingScheduler() scheduler.add_job(reply, 'interval', minutes=1) scheduler.start() if __name__ == '__main__': main()
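The small-looking default change above matters because with nargs='+' argparse returns a list whenever the flag is supplied, so a plain-string default makes the type of args.listen depend on whether the user passed -l; a str default would then be iterated character by character downstream. A quick check of the behavior (flag names kept, values illustrative):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-l", "--listen", nargs="+", default=["happy birthday"])

print(parser.parse_args([]).listen)                  # ['happy birthday']
print(parser.parse_args(["-l", "hi", "yo"]).listen)  # ['hi', 'yo']
# With default='happy birthday', the first call would return a str instead,
# and code looping over phrases would see individual characters.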
85fce5f5ab57b6c2144c92ec0d9b185740d7dc91
pyinform/__init__.py
pyinform/__init__.py
from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath())
from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join from platform import system libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") if system() is 'Windows': return "{}/lib/inform.dll".format(libdir) else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath())
Resolve the library on windows
Resolve the library on windows
Python
mit
ELIFE-ASU/PyInform
from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join + from platform import system libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") + + if system() is 'Windows': + return "{}/lib/inform.dll".format(libdir) else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath())
Resolve the library on windows
## Code Before: from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath()) ## Instruction: Resolve the library on windows ## Code After: from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join from platform import system libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") if system() is 'Windows': return "{}/lib/inform.dll".format(libdir) else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath())
from ctypes import CDLL def get_libpath(): """ Get the library path of the the distributed inform binary. """ import os import re from os.path import dirname, abspath, realpath, join + from platform import system libre = re.compile(r"^inform-(\d+)\.(\d+)\.(\d+)$") root = dirname(abspath(realpath(__file__))) libdir = None major, minor, revision = 0, 0, 0 for _, dirnames, _ in os.walk(root): for dirname in dirnames: match = libre.match(dirname) if match: a, b, c = tuple(int(x) for x in match.group(1,2,3)) if (major, minor, revision) < (a,b,c): major, minor, revision = a, b, c libdir = join(root, match.group()) break break if libdir is None: raise ImportError("cannot find libinform") + + if system() is 'Windows': + return "{}/lib/inform.dll".format(libdir) else: return "{}/lib/libinform.so.{}.{}.{}".format(libdir,major,minor,revision) _inform = CDLL(get_libpath())
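The fix above branches on platform.system() to return a bare inform.dll path on Windows and the versioned libinform.so name elsewhere. One caveat worth flagging: system() is 'Windows' compares object identity, which CPython happens to satisfy for some interned strings but which is not a language guarantee; == is the safe spelling. A hedged sketch of the same dispatch (library names and version defaults are illustrative):

import ctypes.util
from platform import system

def library_filename(name, major=1, minor=0, patch=0):
    # '==' rather than 'is': identity of equal strings is an
    # implementation detail, not something to rely on.
    if system() == "Windows":
        return "{}.dll".format(name)
    if system() == "Darwin":
        return "lib{}.dylib".format(name)
    return "lib{}.so.{}.{}.{}".format(name, major, minor, patch)

print(library_filename("inform"))
# When the library lives on the default search path instead of inside the
# package, ctypes.util.find_library is another option:
print(ctypes.util.find_library("c"))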
393a2f5f0ccfedc1c5ebd7de987c870419ca2d89
scripts/calculate_lqr_gain.py
scripts/calculate_lqr_gain.py
import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) print(-K) print()
import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) K = -K for r in range(K.shape[0]): row = ', '.join(str(elem) for elem in K[r, :]) if r != K.shape[0] - 1: row += ',' print(row) print()
Change LQR gain element printing
Change LQR gain element printing Change printing of LQR gain elements for easier copying.
Python
bsd-2-clause
oliverlee/phobos,oliverlee/phobos,oliverlee/phobos,oliverlee/phobos
import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) + K = -K + for r in range(K.shape[0]): + row = ', '.join(str(elem) for elem in K[r, :]) + if r != K.shape[0] - 1: + row += ',' - print(-K) + print(row) print()
Change LQR gain element printing
## Code Before: import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) print(-K) print() ## Instruction: Change LQR gain element printing ## Code After: import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) K = -K for r in range(K.shape[0]): row = ', '.join(str(elem) for elem in K[r, :]) if r != K.shape[0] - 1: row += ',' print(row) print()
import numpy as np import scipy import control from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices def compute_whipple_lqr_gain(velocity): _, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity) Q = np.diag([1e5, 1e3, 1e3, 1e2]) R = np.eye(2) gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)] return gains if __name__ == '__main__': import sys v_low = 0 # m/s if len(sys.argv) > 1: v_high = int(sys.argv[1]) else: v_high = 1 # m/s velocities = [v_low, v_high] gains = compute_whipple_lqr_gain(velocities) for v, K in zip(velocities, gains): print('computed LQR controller feedback gain for v = {}'.format(v)) + K = -K + for r in range(K.shape[0]): + row = ', '.join(str(elem) for elem in K[r, :]) + if r != K.shape[0] - 1: + row += ',' - print(-K) ? ^^ + print(row) ? ++++ ^^^ print()
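The rewrite above prints each gain row as comma-separated values with a trailing comma on every row but the last, presumably so -K can be pasted straight into a C-style array initializer ("for easier copying", per the message). The diff never shows the resulting output, so here is the same formatting distilled onto a made-up 2x4 matrix:

import numpy as np

K = np.arange(8.0).reshape(2, 4)  # stand-in for an LQR gain matrix

for r in range(K.shape[0]):
    row = ", ".join(str(elem) for elem in K[r, :])
    if r != K.shape[0] - 1:
        row += ","
    print(row)
# 0.0, 1.0, 2.0, 3.0,
# 4.0, 5.0, 6.0, 7.0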
8603d5e83f1eeac84990cb5353b166dd35fa8140
cyder/base/eav/forms.py
cyder/base/eav/forms.py
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'initial' not in kwargs: kwargs['initial'] = dict() kwargs['initial']['attribute'] = kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'instance' in kwargs and kwargs['instance'] is not None: # This is a bound form with a real instance if 'initial' not in kwargs: kwargs['initial'] = dict() # Set the attribute field to the name, not the pk kwargs['initial']['attribute'] = \ kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm
Fix EAV creation form; fix form error bug
Fix EAV creation form; fix form error bug
Python
bsd-3-clause
drkitty/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,zeeman/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,murrown/cyder,akeym/cyder,akeym/cyder,drkitty/cyder
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): + if 'instance' in kwargs and kwargs['instance'] is not None: + # This is a bound form with a real instance + - if 'initial' not in kwargs: + if 'initial' not in kwargs: - kwargs['initial'] = dict() + kwargs['initial'] = dict() - kwargs['initial']['attribute'] = kwargs['instance'].attribute.name + + # Set the attribute field to the name, not the pk + kwargs['initial']['attribute'] = \ + kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm
Fix EAV creation form; fix form error bug
## Code Before: from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'initial' not in kwargs: kwargs['initial'] = dict() kwargs['initial']['attribute'] = kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm ## Instruction: Fix EAV creation form; fix form error bug ## Code After: from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): if 'instance' in kwargs and kwargs['instance'] is not None: # This is a bound form with a real instance if 'initial' not in kwargs: kwargs['initial'] = dict() # Set the attribute field to the name, not the pk kwargs['initial']['attribute'] = \ kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm
from django import forms from django.core.exceptions import ValidationError from cyder.base.eav.models import Attribute class AttributeFormField(forms.CharField): def to_python(self, value): try: return Attribute.objects.get( name=value) except Attribute.DoesNotExist: raise ValidationError("No such attribute") def get_eav_form(eav_model, entity_model): class EAVForm(forms.ModelForm): def __init__(self, *args, **kwargs): + if 'instance' in kwargs and kwargs['instance'] is not None: + # This is a bound form with a real instance + - if 'initial' not in kwargs: + if 'initial' not in kwargs: ? ++++ - kwargs['initial'] = dict() + kwargs['initial'] = dict() ? ++++ - kwargs['initial']['attribute'] = kwargs['instance'].attribute.name + + # Set the attribute field to the name, not the pk + kwargs['initial']['attribute'] = \ + kwargs['instance'].attribute.name super(EAVForm, self).__init__(*args, **kwargs) entity = forms.ModelChoiceField( queryset=entity_model.objects.all(), widget=forms.HiddenInput()) attribute = AttributeFormField() class Meta: model = eav_model fields = ('entity', 'attribute', 'value') return EAVForm
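The guard added above distinguishes a bound edit form, which has a real instance whose attribute name should seed the form field, from an unbound creation form, where kwargs['instance'] is absent or None and the old code crashed dereferencing instance.attribute. A framework-free sketch of just that guard (class names are illustrative, not Django's):

class FakeAttribute:
    def __init__(self, name):
        self.name = name

class FakeInstance:
    def __init__(self, attribute):
        self.attribute = attribute

def build_initial(instance=None, initial=None):
    initial = dict(initial or {})
    if instance is not None:  # creation form: nothing to seed
        initial["attribute"] = instance.attribute.name  # name, not pk
    return initial

print(build_initial())                                      # {}
print(build_initial(FakeInstance(FakeAttribute("color"))))  # {'attribute': 'color'}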
2652919c8d2e6fad8f7b3d47f5e82528b4b5214e
plots/monotone.py
plots/monotone.py
from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss fp_o.write("%s, %f\n" % (t, val_loss_min))
from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss fp_o.write("%s, %f\n" % (t, val_loss_min)) # Ensure the last data point is written for the plot: if val_loss >= val_loss_min: fp_o.write("%s, %f\n" % (t, val_loss_min))
Write the last point for plot completeness
Write the last point for plot completeness
Python
mit
ECP-CANDLE/Database,ECP-CANDLE/Database
from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss + fp_o.write("%s, %f\n" % (t, val_loss_min)) + # Ensure the last data point is written for the plot: + if val_loss >= val_loss_min: fp_o.write("%s, %f\n" % (t, val_loss_min))
Write the last point for plot completeness
## Code Before: from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss fp_o.write("%s, %f\n" % (t, val_loss_min)) ## Instruction: Write the last point for plot completeness ## Code After: from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss fp_o.write("%s, %f\n" % (t, val_loss_min)) # Ensure the last data point is written for the plot: if val_loss >= val_loss_min: fp_o.write("%s, %f\n" % (t, val_loss_min))
from string import * import sys # Set PYTHONPATH=$PWD from plottools import * if len(sys.argv) != 3: abort("usage: <input file> <output file>") input_file = sys.argv[1] output_file = sys.argv[2] val_loss_min = sys.float_info.max with open(input_file, "r") as fp_i, \ open(output_file, "w") as fp_o: for line in fp_i: (t, val_loss_string) = split(line) val_loss = float(val_loss_string) if val_loss < val_loss_min: val_loss_min = val_loss + fp_o.write("%s, %f\n" % (t, val_loss_min)) + # Ensure the last data point is written for the plot: + if val_loss >= val_loss_min: fp_o.write("%s, %f\n" % (t, val_loss_min))
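The loop above only emits a point when a new minimum appears, so a plot of the running minimum would stop at the last improvement rather than spanning the full time range; the appended lines repeat the current minimum at the final timestamp. The same logic over an in-memory series, for illustration (the tuple layout is assumed, not taken from the script):

def running_minimum(points):
    out, best, t = [], float("inf"), None
    for t, loss in points:
        if loss < best:
            best = loss
            out.append((t, best))
    # extend the flat tail so the curve reaches the last time seen
    if t is not None and out and out[-1][0] != t:
        out.append((t, best))
    return out

print(running_minimum([(0, 5.0), (1, 4.0), (2, 4.5), (3, 4.2)]))
# [(0, 5.0), (1, 4.0), (3, 4.0)]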
bfcec696308ee8bfd226a54c17a7e15d49e2aed7
var/spack/repos/builtin/packages/nextflow/package.py
var/spack/repos/builtin/packages/nextflow/package.py
from spack import * from glob import glob import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): chmod = which('chmod') mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) chmod('+x', join_path(prefix.bin, "nextflow"))
from spack import * class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) set_executable( join_path(prefix.bin, "nextflow"))
Add standard header, use spack helpers
Add standard header, use spack helpers Added the standard header (stolen from R). Touched up the install to use set_executable rather than doing it myself.
Python
lgpl-2.1
matthiasdiener/spack,mfherbst/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,TheTimmy/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,EmreAtes/spack,TheTimmy/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,TheTimmy/spack,skosukhin/spack,iulian787/spack,lgarren/spack,EmreAtes/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,lgarren/spack,mfherbst/spack,EmreAtes/spack,skosukhin/spack,krafczyk/spack
from spack import * + - from glob import glob - import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): - chmod = which('chmod') - mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) - chmod('+x', join_path(prefix.bin, "nextflow")) + set_executable( join_path(prefix.bin, "nextflow"))
Add standard header, use spack helpers
## Code Before: from spack import * from glob import glob import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): chmod = which('chmod') mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) chmod('+x', join_path(prefix.bin, "nextflow")) ## Instruction: Add standard header, use spack helpers ## Code After: from spack import * class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) set_executable( join_path(prefix.bin, "nextflow"))
from spack import * + - from glob import glob - import os class Nextflow(Package): """Data-driven computational pipelines""" homepage = "http://www.nextflow.io" version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a', url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow', expand=False) depends_on('jdk') def unpack(self): pass def install(self, spec, prefix): - chmod = which('chmod') - mkdirp(prefix.bin) install("nextflow", join_path(prefix.bin, "nextflow")) - chmod('+x', join_path(prefix.bin, "nextflow")) ? ^^^^ ----- + set_executable( join_path(prefix.bin, "nextflow")) ? +++++++ ^^^^^^
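The touch-up above swaps a shelled-out chmod('+x', ...) for Spack's set_executable helper, which avoids depending on a chmod binary being on PATH. Spack's exact implementation isn't shown here, but a typical plain-Python equivalent of such a helper looks like this (a sketch of the common idiom, not necessarily Spack's code):

import os

def set_executable(path):
    mode = os.stat(path).st_mode
    mode |= (mode & 0o444) >> 2  # add an execute bit wherever a read bit is set
    os.chmod(path, mode)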
3352920f7e92e2732eb2914313bdee6b5ab7f549
setup.py
setup.py
from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, scripts=['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py'], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], )
from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, data_files=[ # using scripts= will cause the first line of the script being modified for python2 or python3 # put the scripts in data_files will copy them as-is ('bin', ['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py']), ], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], )
Fix bin scripts having python2 or python3 specific path.
Fix bin scripts having python2 or python3 specific path.
Python
apache-2.0
mujin/mujincontrollerclientpy
from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, + data_files=[ + # using scripts= will cause the first line of the script being modified for python2 or python3 + # put the scripts in data_files will copy them as-is - scripts=['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py'], + ('bin', ['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py']), + ], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], )
Fix bin scripts having python2 or python3 specific path.
## Code Before: from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, scripts=['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py'], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], ) ## Instruction: Fix bin scripts having python2 or python3 specific path. ## Code After: from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, data_files=[ # using scripts= will cause the first line of the script being modified for python2 or python3 # put the scripts in data_files will copy them as-is ('bin', ['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py']), ], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], )
from distutils.core import setup try: from mujincommon.setuptools import Distribution except ImportError: from distutils.dist import Distribution version = {} exec(open('python/mujincontrollerclient/version.py').read(), version) setup( distclass=Distribution, name='mujincontrollerclient', version=version['__version__'], packages=['mujincontrollerclient'], package_dir={'mujincontrollerclient': 'python/mujincontrollerclient'}, + data_files=[ + # using scripts= will cause the first line of the script being modified for python2 or python3 + # put the scripts in data_files will copy them as-is - scripts=['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py'], ? ^^^ ^^^^ + ('bin', ['bin/mujin_controllerclientpy_registerscene.py', 'bin/mujin_controllerclientpy_applyconfig.py']), ? ^^^^^^^ ^^^^ + + ], locale_dir='locale', license='Apache License, Version 2.0', long_description=open('README.rst').read(), # flake8 compliance configuration enable_flake8=True, # Enable checks fail_on_flake=True, # Fail builds when checks fail install_requires=[], )
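The comment embedded in the change above carries the reasoning: distutils rewrites the shebang line of every file listed in scripts= to point at the interpreter running the install, pinning the script to python2 or python3, whereas data_files copies the files byte for byte. A minimal setup.py sketch of the distinction (package name and script path are illustrative):

from distutils.core import setup

setup(
    name="example",
    version="0.1",
    # scripts=["bin/tool.py"] would rewrite the file's '#!' line to the
    # installing interpreter, e.g. '#!/usr/bin/python3'.
    # data_files copies the listed files verbatim, shebang included:
    data_files=[("bin", ["bin/tool.py"])],
)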
b17104be53389604b4b7f5f109895bdaa6389e43
hic/flow.py
hic/flow.py
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
Add note about flake8 ignore flag.
Add note about flake8 ignore flag.
Python
mit
jbernhard/hic,Duke-QCD/hic
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' + + + # If a variable is only ever used by numexpr, flake8 will flag it as unused. + # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
Add note about flake8 ignore flag.
## Code Before: from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass ## Instruction: Add note about flake8 ignore flag. ## Code After: from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' # If a variable is only ever used by numexpr, flake8 will flag it as unused. # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
from __future__ import division import numpy as np import numexpr as ne __all__ = 'qn', 'FlowCumulant' + + + # If a variable is only ever used by numexpr, flake8 will flag it as unused. + # The comment 'noqa' prevents this warning. def qn(n, phi): return ne.evaluate('sum(exp(1j*n*phi))') class FlowCumulant(object): def __init__(self, multiplicities, qn): self.multiplicities = np.asarray(multiplicities) self._qn = dict(qn) self._corr2 = {} self._corr4 = {} def _calculate_corr2(self, n): try: qn = self._qn[n] # noqa except KeyError: raise M = self.multiplicities # noqa self._corr[n][2] = ne.evaluate( 'sum(qn*conj(qn) - M) / sum(M*(M-1))' ) def _calculate_corr4(self, n): pass def _get_corr(self, n, k): pass def correlation(self, n, k): pass def cumulant(self, n, k, error=False, negative_imaginary=False): pass
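The note added above explains why the # noqa markers exist: numexpr pulls variables out of the calling frame by name when it evaluates the expression string, so locals like qn and M look unused to flake8 (F841) even though the string references them. A tiny self-contained reproduction of the situation (names and numbers illustrative):

import numpy as np
import numexpr as ne

def weighted_mean(data):
    x = data["x"]  # noqa -- referenced only inside the strings below;
    w = data["w"]  # noqa -- numexpr resolves them from the local frame.
    return ne.evaluate("sum(x * w)") / ne.evaluate("sum(w)")

print(weighted_mean({"x": np.array([1.0, 2.0, 3.0]),
                     "w": np.array([1.0, 1.0, 2.0])}))  # 2.25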
cfdbe06da6e35f2cb166374cf249d51f18e1224e
pryvate/blueprints/packages/packages.py
pryvate/blueprints/packages/packages.py
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype})
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
Return a 404 if the package was not found
Return a 404 if the package was not found
Python
mit
Dinoshauer/pryvate,Dinoshauer/pryvate
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) + return make_response('Package not found', 404)
Return a 404 if the package was not found
## Code Before: """Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) ## Instruction: Return a 404 if the package was not found ## Code After: """Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) + return make_response('Package not found', 404)
db14ed2c23b3838796e648faade2c73b786d61ff
tartpy/eventloop.py
tartpy/eventloop.py
import queue import sys import threading import time import traceback from .singleton import Singleton def _format_exception(exc_info): """Create a message with details on the exception.""" exc_type, exc_value, exc_tb = exc_info return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an exception message (output of `_format_exception`) if there is an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: error(_format_exception(sys.exc_info())) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return
import queue import sys import threading import time import traceback from .singleton import Singleton def exception_message(): """Create a message with details on the exception.""" exc_type, exc_value, exc_tb = exc_info = sys.exc_info() return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an exception message (output of `exception_message`) if there is an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: error(exception_message()) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return
Make exception message builder a nicer function
Make exception message builder a nicer function It is used by clients in other modules.
Python
mit
waltermoreira/tartpy
import queue import sys import threading import time import traceback from .singleton import Singleton - def _format_exception(exc_info): + def exception_message(): """Create a message with details on the exception.""" - exc_type, exc_value, exc_tb = exc_info + exc_type, exc_value, exc_tb = exc_info = sys.exc_info() return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an - exception message (output of `_format_exception`) if there is + exception message (output of `exception_message`) if there is an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: - error(_format_exception(sys.exc_info())) + error(exception_message()) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return
Make exception message builder a nicer function
## Code Before: import queue import sys import threading import time import traceback from .singleton import Singleton def _format_exception(exc_info): """Create a message with details on the exception.""" exc_type, exc_value, exc_tb = exc_info return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an exception message (output of `_format_exception`) if there is an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: error(_format_exception(sys.exc_info())) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return ## Instruction: Make exception message builder a nicer function ## Code After: import queue import sys import threading import time import traceback from .singleton import Singleton def exception_message(): """Create a message with details on the exception.""" exc_type, exc_value, exc_tb = exc_info = sys.exc_info() return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an exception message (output of `exception_message`) if there is an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: error(exception_message()) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return
import queue import sys import threading import time import traceback from .singleton import Singleton - def _format_exception(exc_info): + def exception_message(): """Create a message with details on the exception.""" - exc_type, exc_value, exc_tb = exc_info + exc_type, exc_value, exc_tb = exc_info = sys.exc_info() ? +++++++++++++++++ return {'exception': {'type': exc_type, 'value': exc_value, 'traceback': exc_tb}, 'traceback': traceback.format_exception(*exc_info)} class EventLoop(object, metaclass=Singleton): """A generic event loop object.""" def __init__(self): self.queue = queue.Queue() def schedule(self, event): """Schedule an event. The events have the form:: (event, error) where `event` is a thunk and `error` is called with an - exception message (output of `_format_exception`) if there is ? -------- + exception message (output of `exception_message`) if there is ? ++++++++ an error when executing `event`. """ self.queue.put(event) def stop(self): """Stop the loop.""" pass def run_step(self, block=True): """Process one event.""" ev, error = self.queue.get(block=block) try: ev() except Exception as exc: - error(_format_exception(sys.exc_info())) + error(exception_message()) def run(self): """Process all events in the queue.""" try: while True: self.run_step(block=False) except queue.Empty: return
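A hedged usage sketch of the refactor in this record: `exception_message()` now takes no arguments and reads `sys.exc_info()` itself, so callers in other modules can invoke it directly inside an `except` block (function body copied from the record; the division is just a trigger):

```python
import sys
import traceback

def exception_message():
    """Create a message with details on the exception."""
    exc_type, exc_value, exc_tb = exc_info = sys.exc_info()
    return {'exception': {'type': exc_type,
                          'value': exc_value,
                          'traceback': exc_tb},
            'traceback': traceback.format_exception(*exc_info)}

try:
    1 / 0
except Exception:
    msg = exception_message()  # no exc_info argument needed any more
    assert msg['exception']['type'] is ZeroDivisionError
```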
b46dc26e5e1b4c0388c330017dc52393417c3323
tests/test_init.py
tests/test_init.py
from disco.test import TestCase, TestJob class InitJob(TestJob): sort = False @staticmethod def map_reader(stream, size, url, params): params.x = 10 return (stream, size, url) @staticmethod def map_init(iter, params): assert hasattr(params, 'x') iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100)
from disco.test import TestCase, TestJob class InitJob(TestJob): params = {'x': 10} sort = False @staticmethod def map_init(iter, params): iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100)
Revert "added a test for the map_reader before map_init -case which fails currently" (deprecate init functions instead)
Revert "added a test for the map_reader before map_init -case which fails currently" (deprecate init functions instead) This reverts commit 88551bf444b7b358fea8e7eb4475df2c5d87ceeb.
Python
bsd-3-clause
ErikDubbelboer/disco,pombredanne/disco,mwilliams3/disco,simudream/disco,pombredanne/disco,simudream/disco,mozilla/disco,beni55/disco,ErikDubbelboer/disco,pavlobaron/disco_playground,pooya/disco,ktkt2009/disco,scrapinghub/disco,seabirdzh/disco,pombredanne/disco,pooya/disco,pombredanne/disco,mwilliams3/disco,ktkt2009/disco,oldmantaiter/disco,simudream/disco,discoproject/disco,seabirdzh/disco,discoproject/disco,mwilliams3/disco,pavlobaron/disco_playground,oldmantaiter/disco,discoproject/disco,simudream/disco,beni55/disco,mozilla/disco,scrapinghub/disco,oldmantaiter/disco,pombredanne/disco,mwilliams3/disco,beni55/disco,mozilla/disco,scrapinghub/disco,ktkt2009/disco,scrapinghub/disco,seabirdzh/disco,discoproject/disco,seabirdzh/disco,beni55/disco,ErikDubbelboer/disco,mozilla/disco,ErikDubbelboer/disco,mwilliams3/disco,pooya/disco,seabirdzh/disco,pavlobaron/disco_playground,pavlobaron/disco_playground,ktkt2009/disco,ktkt2009/disco,discoproject/disco,simudream/disco,pooya/disco,beni55/disco,oldmantaiter/disco,oldmantaiter/disco,ErikDubbelboer/disco
from disco.test import TestCase, TestJob class InitJob(TestJob): + params = {'x': 10} sort = False @staticmethod - def map_reader(stream, size, url, params): - params.x = 10 - return (stream, size, url) - - @staticmethod def map_init(iter, params): - assert hasattr(params, 'x') iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100)
Revert "added a test for the map_reader before map_init -case which fails currently" (deprecate init functions instead)
## Code Before: from disco.test import TestCase, TestJob class InitJob(TestJob): sort = False @staticmethod def map_reader(stream, size, url, params): params.x = 10 return (stream, size, url) @staticmethod def map_init(iter, params): assert hasattr(params, 'x') iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100) ## Instruction: Revert "added a test for the map_reader before map_init -case which fails currently" (deprecate init functions instead) ## Code After: from disco.test import TestCase, TestJob class InitJob(TestJob): params = {'x': 10} sort = False @staticmethod def map_init(iter, params): iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100)
from disco.test import TestCase, TestJob class InitJob(TestJob): + params = {'x': 10} sort = False @staticmethod - def map_reader(stream, size, url, params): - params.x = 10 - return (stream, size, url) - - @staticmethod def map_init(iter, params): - assert hasattr(params, 'x') iter.next() params['x'] += 100 @staticmethod def map(e, params): yield e, int(e) + params['x'] @staticmethod def reduce_init(iter, params): params['y'] = 1000 @staticmethod def reduce(iter, params): for k, v in iter: yield k, int(v) + params['y'] class InitTestCase(TestCase): def serve(self, path): return 'skipthis\n' + ('%s\n' % path) * 10 def runTest(self): self.job = InitJob().run(input=self.test_server.urls(range(10))) results = list(self.results(self.job)) for k, v in results: self.assertEquals(int(k) + 1110, int(v)) self.assertEquals(len(results), 100)
c07b6d2abae4ccd1eacb846a947945ecd6e963af
photutils/utils/_round.py
photutils/utils/_round.py
import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): value = np.asscalar(value) return value
import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): value = value.item() return value
Replace deprecated np.asscalar(a) with a.item()
Replace deprecated np.asscalar(a) with a.item()
Python
bsd-3-clause
larrybradley/photutils,astropy/photutils
import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): - value = np.asscalar(value) + value = value.item() return value
Replace deprecated np.asscalar(a) with a.item()
## Code Before: import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): value = np.asscalar(value) return value ## Instruction: Replace deprecated np.asscalar(a) with a.item() ## Code After: import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): value = value.item() return value
import numpy as np def _py2intround(a): """ Round the input to the nearest integer. If two integers are equally close, rounding is done away from 0. """ data = np.asanyarray(a) value = np.where(data >= 0, np.floor(data + 0.5), np.ceil(data - 0.5)).astype(int) if not hasattr(a, '__iter__'): - value = np.asscalar(value) + value = value.item() return value
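For context on the replacement in this record: `np.asscalar(a)` was deprecated in favour of `a.item()` (around NumPy 1.16, to the best of my knowledge), and both return a plain Python scalar. A minimal check:

```python
import numpy as np

value = np.array(3)            # 0-d array, like the non-iterable branch above
assert value.item() == 3
assert isinstance(value.item(), int)   # plain Python int, not np.int64
```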
c31bc6f1b0782a7d9c409e233a363be651594006
exporters/decompressors.py
exporters/decompressors.py
from exporters.pipeline.base_pipeline_item import BasePipelineItem import logging import zlib __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: logging.error('Error decoding stream using ZlibDecompressor') if str(e).startswith('Error -3 '): logging.error("Use NoDecompressor if you're using uncompressed input") raise class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed
from exporters.pipeline.base_pipeline_item import BasePipelineItem import sys import zlib import six __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: msg = str(e) if msg.startswith('Error -3 '): msg += ". Use NoDecompressor if you're using uncompressed input." six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2]) class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed
Append information to the zlib error
Append information to the zlib error
Python
bsd-3-clause
scrapinghub/exporters
from exporters.pipeline.base_pipeline_item import BasePipelineItem - import logging + import sys import zlib + import six __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: - logging.error('Error decoding stream using ZlibDecompressor') + msg = str(e) - if str(e).startswith('Error -3 '): + if msg.startswith('Error -3 '): - logging.error("Use NoDecompressor if you're using uncompressed input") + msg += ". Use NoDecompressor if you're using uncompressed input." - raise + six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2]) class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed
Append information to the zlib error
## Code Before: from exporters.pipeline.base_pipeline_item import BasePipelineItem import logging import zlib __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: logging.error('Error decoding stream using ZlibDecompressor') if str(e).startswith('Error -3 '): logging.error("Use NoDecompressor if you're using uncompressed input") raise class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed ## Instruction: Append information to the zlib error ## Code After: from exporters.pipeline.base_pipeline_item import BasePipelineItem import sys import zlib import six __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: msg = str(e) if msg.startswith('Error -3 '): msg += ". Use NoDecompressor if you're using uncompressed input." six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2]) class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed
from exporters.pipeline.base_pipeline_item import BasePipelineItem - import logging + import sys import zlib + import six __all__ = ['BaseDecompressor', 'ZLibDecompressor', 'NoDecompressor'] class BaseDecompressor(BasePipelineItem): def decompress(self): raise NotImplementedError() def create_decompressor(): # create zlib decompressor enabling automatic header detection: # See: http://stackoverflow.com/a/22310760/149872 AUTOMATIC_HEADER_DETECTION_MASK = 32 return zlib.decompressobj(AUTOMATIC_HEADER_DETECTION_MASK | zlib.MAX_WBITS) class ZLibDecompressor(BaseDecompressor): def decompress(self, stream): try: dec = create_decompressor() for chunk in stream: rv = dec.decompress(chunk) if rv: yield rv if dec.unused_data: stream.unshift(dec.unused_data) dec = create_decompressor() except zlib.error as e: - logging.error('Error decoding stream using ZlibDecompressor') + msg = str(e) - if str(e).startswith('Error -3 '): ? ^^^^^ + if msg.startswith('Error -3 '): ? + ^ - logging.error("Use NoDecompressor if you're using uncompressed input") ? ^^ ^^^^ ^^^^^^^ - + msg += ". Use NoDecompressor if you're using uncompressed input." ? ^^ ^^^^^ ^ + - raise + six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2]) class NoDecompressor(BaseDecompressor): def decompress(self, stream): return stream # Input already uncompressed
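A standalone sketch of the re-raise pattern in this record (assumes `six` is installed): the message is extended with a hint while `six.reraise` keeps the original traceback on both Python 2 and 3. Library names match the record; the byte string is just an arbitrary non-zlib input:

```python
import sys
import zlib
import six

def decompress_or_explain(data):
    try:
        return zlib.decompress(data)
    except zlib.error as e:
        msg = str(e)
        if msg.startswith('Error -3 '):
            msg += ". Use NoDecompressor if you're using uncompressed input."
        six.reraise(zlib.error, zlib.error(msg), sys.exc_info()[2])

try:
    decompress_or_explain(b'plainly uncompressed bytes')
except zlib.error as e:
    assert 'NoDecompressor' in str(e)  # hint appended, traceback preserved
```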
b4d43bfbcc03b93826c194fb98a52b411dc6304b
turbustat/tests/test_wrapper.py
turbustat/tests/test_wrapper.py
from ..statistics import stats_wrapper from ._testing_data import \ dataset1, dataset2 def test_wrapper(): run_wrapper = stats_wrapper(dataset1, dataset2)
import pytest import numpy as np from ..statistics import stats_wrapper, statistics_list from ._testing_data import \ dataset1, dataset2 spacers = np.arange(2, len(statistics_list) + 1, 2) # Split these into smaller tests to avoid timeout errors on Travis @pytest.mark.parametrize(('stats'), [statistics_list[i - 2:i] for i in spacers]) def test_wrapper(stats): stats_wrapper(dataset1, dataset2, statistics=stats)
Split wrapper tests into smaller chunks
Split wrapper tests into smaller chunks
Python
mit
Astroua/TurbuStat,e-koch/TurbuStat
+ import pytest + import numpy as np + - from ..statistics import stats_wrapper + from ..statistics import stats_wrapper, statistics_list from ._testing_data import \ dataset1, dataset2 + spacers = np.arange(2, len(statistics_list) + 1, 2) - def test_wrapper(): - run_wrapper = stats_wrapper(dataset1, dataset2) + # Split these into smaller tests to avoid timeout errors on Travis + @pytest.mark.parametrize(('stats'), + [statistics_list[i - 2:i] for i in + spacers]) + def test_wrapper(stats): + stats_wrapper(dataset1, dataset2, + statistics=stats) +
Split wrapper tests into smaller chunks
## Code Before: from ..statistics import stats_wrapper from ._testing_data import \ dataset1, dataset2 def test_wrapper(): run_wrapper = stats_wrapper(dataset1, dataset2) ## Instruction: Split wrapper tests into smaller chunks ## Code After: import pytest import numpy as np from ..statistics import stats_wrapper, statistics_list from ._testing_data import \ dataset1, dataset2 spacers = np.arange(2, len(statistics_list) + 1, 2) # Split these into smaller tests to avoid timeout errors on Travis @pytest.mark.parametrize(('stats'), [statistics_list[i - 2:i] for i in spacers]) def test_wrapper(stats): stats_wrapper(dataset1, dataset2, statistics=stats)
+ import pytest + import numpy as np + - from ..statistics import stats_wrapper + from ..statistics import stats_wrapper, statistics_list ? +++++++++++++++++ from ._testing_data import \ dataset1, dataset2 + spacers = np.arange(2, len(statistics_list) + 1, 2) - def test_wrapper(): + # Split these into smaller tests to avoid timeout errors on Travis + @pytest.mark.parametrize(('stats'), + [statistics_list[i - 2:i] for i in + spacers]) + def test_wrapper(stats): + - run_wrapper = stats_wrapper(dataset1, dataset2) ? -------------- ^ + stats_wrapper(dataset1, dataset2, ? ^ + statistics=stats)
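The chunking idiom from this record, isolated (even-length list for clarity): `spacers` marks every second index and each parametrized case receives a two-item slice, so each pytest case runs only part of the statistics list:

```python
import numpy as np

statistics_list = ['a', 'b', 'c', 'd']
spacers = np.arange(2, len(statistics_list) + 1, 2)   # array([2, 4])
chunks = [statistics_list[i - 2:i] for i in spacers]
assert chunks == [['a', 'b'], ['c', 'd']]
```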
591aaa938c22b797fc6bbeb5050ec489cc966a47
tests/run_tests.py
tests/run_tests.py
from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main()
import sys, os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))) from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main()
Make running unit tests more friendly
Make running unit tests more friendly
Python
mit
CovenantEyes/py_stringlike
+ import sys, os + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))) from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main()
Make running unit tests more friendly
## Code Before: from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main() ## Instruction: Make running unit tests more friendly ## Code After: import sys, os sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))) from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main()
+ import sys, os + sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '..'))) from unittest import main from test_core import * from test_lazy import * if __name__ == '__main__': main()
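A common variant of the bootstrap in this record, shown for comparison: using `__file__` instead of `sys.argv[0]` points at the same project root when the test module is executed as a script (behaviour can differ slightly when the module is imported rather than run):

```python
import os
import sys

# Same effect as the sys.argv[0] form when run as `python tests/run_tests.py`.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
```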
93903d065cd1ff8f3f0c715668f05c804c5561f9
profile/linearsvc.py
profile/linearsvc.py
import cProfile from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score X, y = load_svmlight_file("data.txt") svc = LinearSVC() cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {}) svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy))
import timeit from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score setup = """ from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file X, y = load_svmlight_file("data.txt") svc = LinearSVC() """ time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1) print("Time: {}".format(time)) X, y = load_svmlight_file("data.txt") svc = LinearSVC() svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy))
Use timeit instead of cProfile
Use timeit instead of cProfile
Python
mit
JuliaPackageMirrors/SoftConfidenceWeighted.jl,IshitaTakeshi/SoftConfidenceWeighted.jl
- import cProfile + import timeit + from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score + setup = """ + from sklearn.svm import LinearSVC + from sklearn.datasets import load_svmlight_file X, y = load_svmlight_file("data.txt") + svc = LinearSVC() + """ + time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1) + print("Time: {}".format(time)) + + X, y = load_svmlight_file("data.txt") svc = LinearSVC() - - cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {}) - svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy))
Use timeit instead of cProfile
## Code Before: import cProfile from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score X, y = load_svmlight_file("data.txt") svc = LinearSVC() cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {}) svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy)) ## Instruction: Use timeit instead of cProfile ## Code After: import timeit from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score setup = """ from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file X, y = load_svmlight_file("data.txt") svc = LinearSVC() """ time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1) print("Time: {}".format(time)) X, y = load_svmlight_file("data.txt") svc = LinearSVC() svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy))
- import cProfile + import timeit + from sklearn.svm import LinearSVC from sklearn.datasets import load_svmlight_file from sklearn.metrics import accuracy_score + setup = """ + from sklearn.svm import LinearSVC + from sklearn.datasets import load_svmlight_file X, y = load_svmlight_file("data.txt") + svc = LinearSVC() + """ + time = timeit.timeit('svc.fit(X, y)', setup=setup, number=1) + print("Time: {}".format(time)) + + X, y = load_svmlight_file("data.txt") svc = LinearSVC() - - cProfile.runctx('svc.fit(X, y)', {'svc': svc, 'X': X, 'y': y}, {}) - svc.fit(X, y) results = svc.predict(X) accuracy = accuracy_score(y, results) print("Accuracy: {}".format(accuracy))
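For reference, `timeit` also accepts a callable directly, which avoids duplicating the setup code as a string (the loop body here is a placeholder for the record's `svc.fit(X, y)`):

```python
import timeit

def fit_once():
    sum(range(1000))  # stand-in for svc.fit(X, y)

print("Time: {}".format(timeit.timeit(fit_once, number=1)))
```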
6bd8ecf5719e15674ef67100b92822be3cf8e5ec
dataportal/tests/test_replay_persistance.py
dataportal/tests/test_replay_persistance.py
import nose from dataportal.replay.persist import History h = None def setup(): h = History(':memory:') def test_history(): pass
from nose.tools import assert_equal from dataportal.replay.persist import History import dataportal.replay.persist OBJ_ID_LEN = 36 h = None def setup(): global h h = History(':memory:') def test_history(): run_id = ''.join(['a'] * OBJ_ID_LEN) # Simple round-trip: put and get config1 = {'plot_x': 'long', 'plot_y': 'island'} h.put(run_id, config1) result1 = h.get(run_id) assert_equal(result1, config1) # Put a second entry. Check that get returns most recent. config2 = {'plot_x': 'new', 'plot_y': 'york'} h.put(run_id, config2) result2 = h.get(run_id) assert_equal(result2, config2) # And get(..., 1) returns previous. result1 = h.get(run_id, 1) assert_equal(result1, config1)
Add real tests of replay History.
TST: Add real tests of replay History.
Python
bsd-3-clause
tacaswell/dataportal,danielballan/datamuxer,danielballan/datamuxer,NSLS-II/dataportal,ericdill/datamuxer,danielballan/dataportal,NSLS-II/datamuxer,danielballan/dataportal,ericdill/databroker,tacaswell/dataportal,NSLS-II/dataportal,ericdill/datamuxer,ericdill/databroker
- import nose + from nose.tools import assert_equal from dataportal.replay.persist import History + import dataportal.replay.persist + OBJ_ID_LEN = 36 h = None + def setup(): + global h h = History(':memory:') + def test_history(): - pass + run_id = ''.join(['a'] * OBJ_ID_LEN) + # Simple round-trip: put and get + config1 = {'plot_x': 'long', 'plot_y': 'island'} + h.put(run_id, config1) + result1 = h.get(run_id) + assert_equal(result1, config1) + # Put a second entry. Check that get returns most recent. + config2 = {'plot_x': 'new', 'plot_y': 'york'} + h.put(run_id, config2) + result2 = h.get(run_id) + assert_equal(result2, config2) + # And get(..., 1) returns previous. + result1 = h.get(run_id, 1) + assert_equal(result1, config1) +
Add real tests of replay History.
## Code Before: import nose from dataportal.replay.persist import History h = None def setup(): h = History(':memory:') def test_history(): pass ## Instruction: Add real tests of replay History. ## Code After: from nose.tools import assert_equal from dataportal.replay.persist import History import dataportal.replay.persist OBJ_ID_LEN = 36 h = None def setup(): global h h = History(':memory:') def test_history(): run_id = ''.join(['a'] * OBJ_ID_LEN) # Simple round-trip: put and get config1 = {'plot_x': 'long', 'plot_y': 'island'} h.put(run_id, config1) result1 = h.get(run_id) assert_equal(result1, config1) # Put a second entry. Check that get returns most recent. config2 = {'plot_x': 'new', 'plot_y': 'york'} h.put(run_id, config2) result2 = h.get(run_id) assert_equal(result2, config2) # And get(..., 1) returns previous. result1 = h.get(run_id, 1) assert_equal(result1, config1)
- import nose + from nose.tools import assert_equal from dataportal.replay.persist import History + import dataportal.replay.persist + OBJ_ID_LEN = 36 h = None + def setup(): + global h h = History(':memory:') + def test_history(): - pass + run_id = ''.join(['a'] * OBJ_ID_LEN) + # Simple round-trip: put and get + config1 = {'plot_x': 'long', 'plot_y': 'island'} + h.put(run_id, config1) + result1 = h.get(run_id) + assert_equal(result1, config1) + + # Put a second entry. Check that get returns most recent. + config2 = {'plot_x': 'new', 'plot_y': 'york'} + h.put(run_id, config2) + result2 = h.get(run_id) + assert_equal(result2, config2) + # And get(..., 1) returns previous. + result1 = h.get(run_id, 1) + assert_equal(result1, config1)
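The one-line `global h` in this record is the substantive fix: without it, the assignment in `setup()` binds a function-local name and the module-level `h` stays `None` when the tests run. A minimal demonstration:

```python
h = None

def setup_without_global():
    h = object()       # binds a local; module-level h is untouched

def setup_with_global():
    global h
    h = object()       # rebinds the module-level name

setup_without_global()
assert h is None
setup_with_global()
assert h is not None
```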
ad934e49a43a8340af9d52bbac86bede45d0e84d
aero/adapters/brew.py
aero/adapters/brew.py
__author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.search_info(line) )) ) for line in response.splitlines() if line]) return {} def search_info(self, query): info = self.info(query) return filter( None, [ info[0], info[1] if len(info) > 1 else None, info[2] if len(info) > 2 else None, ] ) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
__author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), self.search_info(self.package_name(line)) ) for line in response.splitlines() if line]) return {} def search_info(self, query): response = self._execute_command('aero', ['info', query], False)[0] from re import split lines = response.splitlines() idx = lines.index(' ________________________________________ __________________________________________________ ') return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
Use aero info instead for caching info
Use aero info instead for caching info Brew requires brew info for additional information. If we instead call aero info we can at least cache the info calls for later.
Python
bsd-3-clause
Aeronautics/aero
__author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), - '\n'.join(map( - lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], - self.search_info(line) + self.search_info(self.package_name(line)) - )) ) for line in response.splitlines() if line]) return {} def search_info(self, query): + response = self._execute_command('aero', ['info', query], False)[0] + from re import split + lines = response.splitlines() + idx = lines.index(' ________________________________________ __________________________________________________ ') + return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) - info = self.info(query) - return filter( - None, - [ - info[0], - info[1] if len(info) > 1 else None, - info[2] if len(info) > 2 else None, - ] - ) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
Use aero info instead for caching info
## Code Before: __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), '\n'.join(map( lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], self.search_info(line) )) ) for line in response.splitlines() if line]) return {} def search_info(self, query): info = self.info(query) return filter( None, [ info[0], info[1] if len(info) > 1 else None, info[2] if len(info) > 2 else None, ] ) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {} ## Instruction: Use aero info instead for caching info ## Code After: __author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), self.search_info(self.package_name(line)) ) for line in response.splitlines() if line]) return {} def search_info(self, query): response = self._execute_command('aero', ['info', query], False)[0] from re import split lines = response.splitlines() idx = lines.index(' ________________________________________ __________________________________________________ ') return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
__author__ = 'nickl-' from aero.__version__ import __version__ from .base import BaseAdapter class Brew(BaseAdapter): """ Homebrew adapter. """ def search(self, query): response = self.command(['search', query])[0] if 'No formula found' not in response and 'Error:' not in response: return dict([( self.package_name(line), - '\n'.join(map( - lambda k: k[0] if len(k) < 2 else k[0] + ': ' + k[1], - self.search_info(line) ? ---- + self.search_info(self.package_name(line)) ? ++++++++++++++++++ + - )) ) for line in response.splitlines() if line]) return {} def search_info(self, query): + response = self._execute_command('aero', ['info', query], False)[0] + from re import split + lines = response.splitlines() + idx = lines.index(' ________________________________________ __________________________________________________ ') + return '\n'.join([''.join(split('\x1b.*?m', l)).replace(' : ', '').strip() for l in response.splitlines()[idx+1:idx+4]]) - info = self.info(query) - return filter( - None, - [ - info[0], - info[1] if len(info) > 1 else None, - info[2] if len(info) > 2 else None, - ] - ) def info(self, query): if '/' in query: self.command(['tap', '/'.join(query.split('/')[:-1])]) response = self.command(['info', query])[0] if 'Error:' not in response: response = response.replace(query + ': ', 'version: ') return [line.split(': ', 1) for line in response.splitlines() if 'homebrew' not in line] return [['No info available']] def install(self, query): self.shell(['install', query]) return {}
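The ANSI-stripping idiom from this record's `search_info`, in isolation: splitting on `'\x1b.*?m'` drops terminal colour codes before the pieces are joined back together (the sample line is fabricated; real `aero info` output is not reproduced here):

```python
import re

line = '\x1b[1;32mgit\x1b[0m: distributed version control'
clean = ''.join(re.split('\x1b.*?m', line)).strip()
assert clean == 'git: distributed version control'
```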
ed4f786de54dde50cb26cfe4859507579806a14b
portal_sale_distributor/models/ir_action_act_window.py
portal_sale_distributor/models/ir_action_act_window.py
from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) if result and result[0].get('context'): ctx = safe_eval(result[0].get('context', '{}')) if ctx.get('portal_products'): pricelist = self.env.user.partner_id.property_product_pricelist ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id}) result[0].update({'context': ctx}) return result
from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) for value in result: if value.get('context') and 'portal_products' in value.get('context'): eval_ctx = dict(self.env.context) try: ctx = safe_eval(value.get('context', '{}'), eval_ctx) except: ctx = {} pricelist = self.env.user.partner_id.property_product_pricelist ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id}) value.update({'context': str(ctx)}) return result
Adjust to avoid bugs with other values in context
[FIX] portal_sale_distributor: Adjust to avoid bugs with other values in context closes ingadhoc/sale#493 X-original-commit: 441d30af0c3fa8cbbe129893107436ea69cca740 Signed-off-by: Juan José Scarafía <[email protected]>
Python
agpl-3.0
ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale
from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) - if result and result[0].get('context'): + for value in result: + if value.get('context') and 'portal_products' in value.get('context'): + eval_ctx = dict(self.env.context) + try: - ctx = safe_eval(result[0].get('context', '{}')) + ctx = safe_eval(value.get('context', '{}'), eval_ctx) - if ctx.get('portal_products'): + except: + ctx = {} pricelist = self.env.user.partner_id.property_product_pricelist - ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id}) + ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id}) - result[0].update({'context': ctx}) + value.update({'context': str(ctx)}) return result
Adjust to avoid bugs with other values in context
## Code Before: from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) if result and result[0].get('context'): ctx = safe_eval(result[0].get('context', '{}')) if ctx.get('portal_products'): pricelist = self.env.user.partner_id.property_product_pricelist ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id}) result[0].update({'context': ctx}) return result ## Instruction: Adjust to avoid bugs with other values in context ## Code After: from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) for value in result: if value.get('context') and 'portal_products' in value.get('context'): eval_ctx = dict(self.env.context) try: ctx = safe_eval(value.get('context', '{}'), eval_ctx) except: ctx = {} pricelist = self.env.user.partner_id.property_product_pricelist ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id}) value.update({'context': str(ctx)}) return result
from odoo import models, api from odoo.tools.safe_eval import safe_eval class ActWindowView(models.Model): _inherit = 'ir.actions.act_window' def read(self, fields=None, load='_classic_read'): result = super().read(fields, load=load) - if result and result[0].get('context'): + for value in result: + if value.get('context') and 'portal_products' in value.get('context'): + eval_ctx = dict(self.env.context) + try: - ctx = safe_eval(result[0].get('context', '{}')) ? ^ ------- + ctx = safe_eval(value.get('context', '{}'), eval_ctx) ? ++++++++ ^^^^ ++++++++++ - if ctx.get('portal_products'): + except: + ctx = {} pricelist = self.env.user.partner_id.property_product_pricelist - ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id}) + ctx.update({'pricelist': pricelist.id, 'partner': self.env.user.partner_id.id}) ? +++ - result[0].update({'context': ctx}) ? ^ ------- + value.update({'context': str(ctx)}) ? ^^^^ ++++ + return result
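Why the record wraps `safe_eval` in try/except: stored action contexts are strings that may reference names (such as `active_id`) which only exist in the caller's evaluation context, so the fix evaluates against the env context and falls back to `{}`. A plain-Python sketch with a restricted `eval` standing in for Odoo's `safe_eval` (the stand-in and the context string are illustrative only):

```python
def safe_eval_stub(expr, ctx):
    # Stand-in only; Odoo's safe_eval is far more restrictive than this.
    return eval(expr, {'__builtins__': {}}, dict(ctx))

eval_ctx = {'active_id': 42}
try:
    ctx = safe_eval_stub("{'portal_products': True, 'default_id': active_id}",
                         eval_ctx)
except Exception:
    ctx = {}
assert ctx == {'portal_products': True, 'default_id': 42}
```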
58e636838b0988814905fc163f369dd837aedfde
mopidy/backends/gstreamer.py
mopidy/backends/gstreamer.py
import logging from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend)
import logging import gst from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): PAUSED = gst.STATE_PAUSED PLAYING = gst.STATE_PLAYING STOPPED = gst.STATE_NULL def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend)
Add playback states to GStreamer
Add playback states to GStreamer
Python
apache-2.0
abarisain/mopidy,rawdlite/mopidy,jodal/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,hkariti/mopidy,swak/mopidy,jmarsik/mopidy,rawdlite/mopidy,swak/mopidy,adamcik/mopidy,glogiotatidis/mopidy,adamcik/mopidy,ali/mopidy,jmarsik/mopidy,mokieyue/mopidy,vrs01/mopidy,tkem/mopidy,woutervanwijk/mopidy,bencevans/mopidy,priestd09/mopidy,dbrgn/mopidy,jmarsik/mopidy,tkem/mopidy,pacificIT/mopidy,ali/mopidy,ali/mopidy,mokieyue/mopidy,rawdlite/mopidy,mopidy/mopidy,kingosticks/mopidy,bacontext/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,bacontext/mopidy,tkem/mopidy,kingosticks/mopidy,quartz55/mopidy,vrs01/mopidy,ZenithDK/mopidy,pacificIT/mopidy,mokieyue/mopidy,dbrgn/mopidy,bencevans/mopidy,ZenithDK/mopidy,priestd09/mopidy,bacontext/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,bencevans/mopidy,rawdlite/mopidy,jcass77/mopidy,jcass77/mopidy,kingosticks/mopidy,hkariti/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,quartz55/mopidy,vrs01/mopidy,jodal/mopidy,mopidy/mopidy,tkem/mopidy,vrs01/mopidy,SuperStarPL/mopidy,liamw9534/mopidy,swak/mopidy,quartz55/mopidy,bencevans/mopidy,pacificIT/mopidy,diandiankan/mopidy,mopidy/mopidy,diandiankan/mopidy,priestd09/mopidy,quartz55/mopidy,SuperStarPL/mopidy,adamcik/mopidy,swak/mopidy,abarisain/mopidy,jcass77/mopidy,pacificIT/mopidy,hkariti/mopidy,woutervanwijk/mopidy,ali/mopidy,bacontext/mopidy,hkariti/mopidy,jodal/mopidy,diandiankan/mopidy,liamw9534/mopidy
import logging + + import gst from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): + PAUSED = gst.STATE_PAUSED + PLAYING = gst.STATE_PLAYING + STOPPED = gst.STATE_NULL + def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend)
Add playback states to GStreamer
## Code Before: import logging from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend) ## Instruction: Add playback states to GStreamer ## Code After: import logging import gst from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): PAUSED = gst.STATE_PAUSED PLAYING = gst.STATE_PLAYING STOPPED = gst.STATE_NULL def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend)
import logging + + import gst from mopidy.backends import BaseBackend, BasePlaybackController logger = logging.getLogger(u'backends.gstreamer') class GStreamerBackend(BaseBackend): def __init__(self, *args, **kwargs): super(GStreamerBackend, self).__init__(*args, **kwargs) self.playback = GStreamerPlaybackController(self) class GStreamerPlaybackController(BasePlaybackController): + PAUSED = gst.STATE_PAUSED + PLAYING = gst.STATE_PLAYING + STOPPED = gst.STATE_NULL + def __init__(self, backend): super(GStreamerPlaybackController, self).__init__(backend)
0128a0cc3c266848181ed2f6af3db34cc9c99b5d
terroroftinytown/services/googl.py
terroroftinytown/services/googl.py
from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): def process_response(self, response): status_code = response.status_code if status_code in self.params['redirect_codes']: if self.ratelimited(response): return self.process_banned(response) return self.process_redirect(response) elif status_code in self.params['no_redirect_codes']: return self.process_no_redirect(response) elif status_code in self.params['unavailable_codes']: return self.process_unavailable(response) elif status_code in self.params['banned_codes']: return self.process_banned(response) else: return self.process_unknown_code(response) def ratelimited(self, response): if 'Location' not in response.headers: return False result_url = response.headers['Location'] response.content # read the response to allow connection reuse return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): def process_response(self, response): status_code = response.status_code if status_code in self.params['redirect_codes']: if self.ratelimited(response): return self.process_banned(response) return self.process_redirect(response) elif status_code in self.params['no_redirect_codes']: return self.process_no_redirect(response) elif status_code in self.params['unavailable_codes']: return self.process_unavailable(response) elif status_code in self.params['banned_codes']: return self.process_banned(response) else: return self.process_unknown_code(response) def ratelimited(self, response): if 'Location' not in response.headers: return False result_url = response.headers['Location'] response.content # read the response to allow connection reuse return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
Use spaces instead of tabs
Use spaces instead of tabs
Python
mit
ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown
- from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): - def process_response(self, response): + def process_response(self, response): - status_code = response.status_code + status_code = response.status_code - if status_code in self.params['redirect_codes']: + if status_code in self.params['redirect_codes']: - if self.ratelimited(response): + if self.ratelimited(response): - return self.process_banned(response) + return self.process_banned(response) - return self.process_redirect(response) + return self.process_redirect(response) - elif status_code in self.params['no_redirect_codes']: + elif status_code in self.params['no_redirect_codes']: - return self.process_no_redirect(response) + return self.process_no_redirect(response) - elif status_code in self.params['unavailable_codes']: + elif status_code in self.params['unavailable_codes']: - return self.process_unavailable(response) + return self.process_unavailable(response) - elif status_code in self.params['banned_codes']: + elif status_code in self.params['banned_codes']: - return self.process_banned(response) + return self.process_banned(response) - else: + else: - return self.process_unknown_code(response) + return self.process_unknown_code(response) - def ratelimited(self, response): + def ratelimited(self, response): - if 'Location' not in response.headers: + if 'Location' not in response.headers: - return False + return False - result_url = response.headers['Location'] + result_url = response.headers['Location'] - response.content # read the response to allow connection reuse + response.content # read the response to allow connection reuse - return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url) + return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
Use spaces instead of tabs
## Code Before: from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): def process_response(self, response): status_code = response.status_code if status_code in self.params['redirect_codes']: if self.ratelimited(response): return self.process_banned(response) return self.process_redirect(response) elif status_code in self.params['no_redirect_codes']: return self.process_no_redirect(response) elif status_code in self.params['unavailable_codes']: return self.process_unavailable(response) elif status_code in self.params['banned_codes']: return self.process_banned(response) else: return self.process_unknown_code(response) def ratelimited(self, response): if 'Location' not in response.headers: return False result_url = response.headers['Location'] response.content # read the response to allow connection reuse return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url) ## Instruction: Use spaces instead of tabs ## Code After: from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): def process_response(self, response): status_code = response.status_code if status_code in self.params['redirect_codes']: if self.ratelimited(response): return self.process_banned(response) return self.process_redirect(response) elif status_code in self.params['no_redirect_codes']: return self.process_no_redirect(response) elif status_code in self.params['unavailable_codes']: return self.process_unavailable(response) elif status_code in self.params['banned_codes']: return self.process_banned(response) else: return self.process_unknown_code(response) def ratelimited(self, response): if 'Location' not in response.headers: return False result_url = response.headers['Location'] response.content # read the response to allow connection reuse return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
- from terroroftinytown.services.base import BaseService from terroroftinytown.services.status import URLStatus import re class GooglService(BaseService): - def process_response(self, response): ? ^ + def process_response(self, response): ? ^^^^ - status_code = response.status_code ? ^^ + status_code = response.status_code ? ^^^^^^^^ - if status_code in self.params['redirect_codes']: ? ^^ + if status_code in self.params['redirect_codes']: ? ^^^^^^^^ - if self.ratelimited(response): ? ^^^ + if self.ratelimited(response): ? ^^^^^^^^^^^^ - return self.process_banned(response) ? ^^^^ + return self.process_banned(response) ? ^^^^^^^^^^^^^^^^ - return self.process_redirect(response) ? ^^^ + return self.process_redirect(response) ? ^^^^^^^^^^^^ - elif status_code in self.params['no_redirect_codes']: ? ^^ + elif status_code in self.params['no_redirect_codes']: ? ^^^^^^^^ - return self.process_no_redirect(response) ? ^^^ + return self.process_no_redirect(response) ? ^^^^^^^^^^^^ - elif status_code in self.params['unavailable_codes']: ? ^^ + elif status_code in self.params['unavailable_codes']: ? ^^^^^^^^ - return self.process_unavailable(response) ? ^^^ + return self.process_unavailable(response) ? ^^^^^^^^^^^^ - elif status_code in self.params['banned_codes']: ? ^^ + elif status_code in self.params['banned_codes']: ? ^^^^^^^^ - return self.process_banned(response) ? ^^^ + return self.process_banned(response) ? ^^^^^^^^^^^^ - else: + else: - return self.process_unknown_code(response) ? ^^^ + return self.process_unknown_code(response) ? ^^^^^^^^^^^^ - def ratelimited(self, response): ? ^ + def ratelimited(self, response): ? ^^^^ - if 'Location' not in response.headers: ? ^^ + if 'Location' not in response.headers: ? ^^^^^^^^ - return False + return False - result_url = response.headers['Location'] ? ^^ + result_url = response.headers['Location'] ? ^^^^^^^^ - response.content # read the response to allow connection reuse ? ^^ + response.content # read the response to allow connection reuse ? ^^^^^^^^ - return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url) ? ^^ + return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url) ? ^^^^^^^^
2141e4fd2b09d3a8a95e032fb02eafb9e6f818c9
i3pystatus/shell.py
i3pystatus/shell.py
from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") if out[-1] == " ": out = out[:-1] self.output = { "full_text": out, "color": color }
from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") try: if out[-1] == " ": out = out[:-1] except: out = "" self.output = { "full_text": out, "color": color }
Add exception handling for output
Add exception handling for output
Python
mit
opatut/i3pystatus,teto/i3pystatus,schroeji/i3pystatus,ncoop/i3pystatus,juliushaertl/i3pystatus,m45t3r/i3pystatus,richese/i3pystatus,claria/i3pystatus,ncoop/i3pystatus,paulollivier/i3pystatus,paulollivier/i3pystatus,ismaelpuerto/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,fmarchenko/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,yang-ling/i3pystatus,yang-ling/i3pystatus,MaicoTimmerman/i3pystatus,richese/i3pystatus,plumps/i3pystatus,onkelpit/i3pystatus,schroeji/i3pystatus,juliushaertl/i3pystatus,drwahl/i3pystatus,asmikhailov/i3pystatus,eBrnd/i3pystatus,drwahl/i3pystatus,facetoe/i3pystatus,Arvedui/i3pystatus,m45t3r/i3pystatus,Arvedui/i3pystatus,Elder-of-Ozone/i3pystatus,fmarchenko/i3pystatus,enkore/i3pystatus,opatut/i3pystatus,facetoe/i3pystatus,claria/i3pystatus,Elder-of-Ozone/i3pystatus,enkore/i3pystatus,MaicoTimmerman/i3pystatus,ismaelpuerto/i3pystatus,teto/i3pystatus
from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") - + try: - if out[-1] == " ": + if out[-1] == " ": - out = out[:-1] + out = out[:-1] + except: + out = "" self.output = { "full_text": out, "color": color }
Add exception handling for output
## Code Before: from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") if out[-1] == " ": out = out[:-1] self.output = { "full_text": out, "color": color } ## Instruction: Add exception handling for output ## Code After: from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") try: if out[-1] == " ": out = out[:-1] except: out = "" self.output = { "full_text": out, "color": color }
from i3pystatus import IntervalModule from subprocess import check_output, CalledProcessError class Shell(IntervalModule): """ Shows output of shell command """ color = "#FFFFFF" error_color = "#FF0000" settings = ( ("command", "command to be executed"), ("color", "standard color"), ("error_color", "color to use when non zero exit code is returned") ) required = ("command",) def run(self): try: out = check_output(self.command, shell=True) color = self.color except CalledProcessError as e: out = e.output color = self.error_color out = out.decode("UTF-8").replace("\n", " ") - + try: - if out[-1] == " ": + if out[-1] == " ": ? ++++ - out = out[:-1] + out = out[:-1] ? ++++ + except: + out = "" self.output = { "full_text": out, "color": color }
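The try/except added in the record above stops out[-1] from raising IndexError on empty command output, though a bare except: also swallows KeyboardInterrupt and SystemExit. A narrower sketch of the same trimming (hypothetical, not part of the commit) relies on rstrip(), which is a no-op on the empty string:

def clean_output(raw):
    # Decode, flatten newlines, and drop trailing whitespace in one pass;
    # no guard is needed because "".rstrip() simply returns "".
    return raw.decode("UTF-8").replace("\n", " ").rstrip()

assert clean_output(b"two\nlines\n") == "two lines"
assert clean_output(b"") == ""  # the case that crashed out[-1]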
ad888e5c5423fcb2419c497597990868216edfe3
pubrunner/__init__.py
pubrunner/__init__.py
from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings
from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * from pubrunner.pubmed_hash import pubmed_hash def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings
Make pubmed hash accessible to API
Make pubmed hash accessible to API
Python
mit
jakelever/pubrunner,jakelever/pubrunner
from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * + from pubrunner.pubmed_hash import pubmed_hash def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings
Make pubmed hash accessible to API
## Code Before: from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings ## Instruction: Make pubmed hash accessible to API ## Code After: from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * from pubrunner.pubmed_hash import pubmed_hash def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings
from pubrunner.command_line import * from pubrunner.upload import * from pubrunner.FTPClient import * from pubrunner.getresource import * from pubrunner.pubrun import pubrun,cleanWorkingDirectory from pubrunner.convert import * + from pubrunner.pubmed_hash import pubmed_hash def loadYAML(yamlFilename): yamlData = None with open(yamlFilename,'r') as f: try: yamlData = yaml.load(f) except yaml.YAMLError as exc: print(exc) raise return yamlData def findSettingsFile(): possibilities = [ os.getcwd(), os.path.expanduser("~") ] for directory in possibilities: settingsPath = os.path.join(directory,'.pubrunner.settings.yml') if os.path.isfile(settingsPath): return settingsPath raise RuntimeError("Unable to find .pubrunner.settings.yml file. Tried current directory first, then home directory") globalSettings = None def getGlobalSettings(): global globalSettings if globalSettings is None: settingsYamlFile = findSettingsFile() globalSettings = loadYAML(settingsYamlFile) return globalSettings
9b9582a1b7226ceb9cc65657ffb7fd7d51c8ea2a
lib/exp/featx/__init__.py
lib/exp/featx/__init__.py
__all__ = [] from lib.exp.featx.base import Featx from lib.exp.tools.slider import Slider class SlideFeatx(Featx, Slider): def __init__(self, root, name): Featx.__init__(self, root, name) Slider.__init__(self, root, name) def get_feats(self): imgl = self.get_slides(None, gray=True, resize=True) self.feats(imgl, prefix="s")
__all__ = [] from lib.exp.featx.base import Feats from lib.exp.tools.slider import Slider from lib.exp.tools.video import Video from lib.exp.prepare import Prepare class Featx(Feats): def __init__(self, root, name): Feats.__init__(self, root, name) def get_slide_feats(self): ss = Slider(self.root, self.name) imgl = ss.get_slides(None, gray=True, resize=True) self.feats(imgl, prefix="s") def get_frame_feats(self): pp = Prepare(self.root, self.name) vv = Video(self.root, self.name) imgl = vv.get_frames(pp.frame_ids(), gray=True) self.feats(imgl, prefix="f")
Change to use `featx` in package
Change to use `featx` in package
Python
agpl-3.0
speed-of-light/pyslider
__all__ = [] - from lib.exp.featx.base import Featx + from lib.exp.featx.base import Feats from lib.exp.tools.slider import Slider + from lib.exp.tools.video import Video + from lib.exp.prepare import Prepare - class SlideFeatx(Featx, Slider): + class Featx(Feats): def __init__(self, root, name): - Featx.__init__(self, root, name) + Feats.__init__(self, root, name) - Slider.__init__(self, root, name) - def get_feats(self): + def get_slide_feats(self): + ss = Slider(self.root, self.name) - imgl = self.get_slides(None, gray=True, resize=True) + imgl = ss.get_slides(None, gray=True, resize=True) self.feats(imgl, prefix="s") + def get_frame_feats(self): + pp = Prepare(self.root, self.name) + vv = Video(self.root, self.name) + imgl = vv.get_frames(pp.frame_ids(), gray=True) + self.feats(imgl, prefix="f") +
Change to use `featx` in package
## Code Before: __all__ = [] from lib.exp.featx.base import Featx from lib.exp.tools.slider import Slider class SlideFeatx(Featx, Slider): def __init__(self, root, name): Featx.__init__(self, root, name) Slider.__init__(self, root, name) def get_feats(self): imgl = self.get_slides(None, gray=True, resize=True) self.feats(imgl, prefix="s") ## Instruction: Change to use `featx` in package ## Code After: __all__ = [] from lib.exp.featx.base import Feats from lib.exp.tools.slider import Slider from lib.exp.tools.video import Video from lib.exp.prepare import Prepare class Featx(Feats): def __init__(self, root, name): Feats.__init__(self, root, name) def get_slide_feats(self): ss = Slider(self.root, self.name) imgl = ss.get_slides(None, gray=True, resize=True) self.feats(imgl, prefix="s") def get_frame_feats(self): pp = Prepare(self.root, self.name) vv = Video(self.root, self.name) imgl = vv.get_frames(pp.frame_ids(), gray=True) self.feats(imgl, prefix="f")
__all__ = [] - from lib.exp.featx.base import Featx ? ^ + from lib.exp.featx.base import Feats ? ^ from lib.exp.tools.slider import Slider + from lib.exp.tools.video import Video + from lib.exp.prepare import Prepare - class SlideFeatx(Featx, Slider): + class Featx(Feats): def __init__(self, root, name): - Featx.__init__(self, root, name) ? ^ + Feats.__init__(self, root, name) ? ^ - Slider.__init__(self, root, name) - def get_feats(self): + def get_slide_feats(self): ? ++++++ + ss = Slider(self.root, self.name) - imgl = self.get_slides(None, gray=True, resize=True) ? ^^^ + imgl = ss.get_slides(None, gray=True, resize=True) ? ^ self.feats(imgl, prefix="s") + + def get_frame_feats(self): + pp = Prepare(self.root, self.name) + vv = Video(self.root, self.name) + imgl = vv.get_frames(pp.frame_ids(), gray=True) + self.feats(imgl, prefix="f")
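The refactor above swaps multiple inheritance (SlideFeatx(Featx, Slider)) for composition: Featx now builds Slider, Video, and Prepare collaborators where it needs them. A stripped-down sketch of that pattern, with placeholder classes standing in for the real ones:

class Loader:
    def __init__(self, root, name):
        self.root, self.name = root, name

    def get_items(self):
        return ["slide-1", "slide-2"]  # stand-in for real image loading

class Extractor:
    def __init__(self, root, name):
        # Store only the identifiers; collaborators are created per call
        # instead of inherited, so each class keeps one responsibility.
        self.root, self.name = root, name

    def extract(self):
        loader = Loader(self.root, self.name)  # composition, not inheritance
        return [item.upper() for item in loader.get_items()]

print(Extractor("/data", "demo").extract())  # ['SLIDE-1', 'SLIDE-2']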
2f8a2fdad8deb96b7b3c971baf866f248c23fdda
madam_rest/views.py
madam_rest/views.py
from flask import jsonify, url_for from madam_rest import app, asset_storage @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, "meta": {} # TODO: _mutable(asset.metadata) })
from datetime import datetime from flask import jsonify, url_for from fractions import Fraction from frozendict import frozendict from madam_rest import app, asset_storage def _serializable(value): """ Utility function to convert data structures with immutable types to mutable, serializable data structures. :param value: data structure with immutable types :return: mutable, serializable data structure """ if isinstance(value, (tuple, set, frozenset)): return [_serializable(v) for v in value] elif isinstance(value, frozendict): return {k: _serializable(v) for k, v in value.items()} elif isinstance(value, datetime): return value.isoformat() elif isinstance(value, Fraction): return float(value) return value @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, "meta": _serializable(asset.metadata) })
Improve serialization of asset metadata.
Improve serialization of asset metadata.
Python
agpl-3.0
eseifert/madam-rest
+ from datetime import datetime from flask import jsonify, url_for + from fractions import Fraction + from frozendict import frozendict from madam_rest import app, asset_storage + + + def _serializable(value): + """ + Utility function to convert data structures with immutable types to + mutable, serializable data structures. + :param value: data structure with immutable types + :return: mutable, serializable data structure + """ + if isinstance(value, (tuple, set, frozenset)): + return [_serializable(v) for v in value] + elif isinstance(value, frozendict): + return {k: _serializable(v) for k, v in value.items()} + elif isinstance(value, datetime): + return value.isoformat() + elif isinstance(value, Fraction): + return float(value) + return value @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, - "meta": {} # TODO: _mutable(asset.metadata) + "meta": _serializable(asset.metadata) })
Improve serialization of asset metadata.
## Code Before: from flask import jsonify, url_for from madam_rest import app, asset_storage @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, "meta": {} # TODO: _mutable(asset.metadata) }) ## Instruction: Improve serialization of asset metadata. ## Code After: from datetime import datetime from flask import jsonify, url_for from fractions import Fraction from frozendict import frozendict from madam_rest import app, asset_storage def _serializable(value): """ Utility function to convert data structures with immutable types to mutable, serializable data structures. :param value: data structure with immutable types :return: mutable, serializable data structure """ if isinstance(value, (tuple, set, frozenset)): return [_serializable(v) for v in value] elif isinstance(value, frozendict): return {k: _serializable(v) for k, v in value.items()} elif isinstance(value, datetime): return value.isoformat() elif isinstance(value, Fraction): return float(value) return value @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, "meta": _serializable(asset.metadata) })
+ from datetime import datetime from flask import jsonify, url_for + from fractions import Fraction + from frozendict import frozendict from madam_rest import app, asset_storage + + + def _serializable(value): + """ + Utility function to convert data structures with immutable types to + mutable, serializable data structures. + :param value: data structure with immutable types + :return: mutable, serializable data structure + """ + if isinstance(value, (tuple, set, frozenset)): + return [_serializable(v) for v in value] + elif isinstance(value, frozendict): + return {k: _serializable(v) for k, v in value.items()} + elif isinstance(value, datetime): + return value.isoformat() + elif isinstance(value, Fraction): + return float(value) + return value @app.route('/assets/') def assets_retrieve(): assets = [asset_key for asset_key in asset_storage] return jsonify({ "data": assets, "meta": { "count": len(assets) } }) @app.route('/assets/<asset_key>') def asset_retrieve(asset_key): asset = asset_storage[asset_key] return jsonify({ "links": { "self": url_for(asset_retrieve, asset_key=asset_key) }, - "meta": {} # TODO: _mutable(asset.metadata) ? ------------ ^^^ + "meta": _serializable(asset.metadata) ? ^^^^^^^^ })
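The _serializable helper in the record above recursively rewrites immutable containers and special value types into JSON-friendly ones. A short usage sketch; it assumes the helper is importable from the module shown, and the sample metadata is invented:

import json
from datetime import datetime
from fractions import Fraction
from frozendict import frozendict
from madam_rest.views import _serializable

meta = frozendict({
    "created": datetime(2016, 5, 1, 12, 30),
    "aspect_ratio": Fraction(16, 9),
    "tags": frozenset({"video", "demo"}),
})

# Every value is now a plain dict/list/str/float, so json.dumps works.
print(json.dumps(_serializable(meta), sort_keys=True))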
114382ff9b6dad3c9ba621014dd7cd63ad49bef6
django/santropolFeast/meal/models.py
django/santropolFeast/meal/models.py
from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' )
from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) def __str__( self ): return self.nom class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) def __str__( self ): return self.nom class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' ) def __str__( self ): return self.nom
Use string representation for objects
Use string representation for objects
Python
agpl-3.0
savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef
from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) + def __str__( self ): + return self.nom + class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) + def __str__( self ): + return self.nom class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' ) + + def __str__( self ): + return self.nom
Use string representation for objects
## Code Before: from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' ) ## Instruction: Use string representation for objects ## Code After: from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) def __str__( self ): return self.nom class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) def __str__( self ): return self.nom class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' ) def __str__( self ): return self.nom
from django.db import models from django.utils.translation import ugettext_lazy as _ class Meal(models.Model): class Meta: verbose_name_plural = _('meals') # Meal information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_meals' ) + def __str__( self ): + return self.nom + class Ingredient(models.Model): class Meta: verbose_name_plural = _('ingredients') # Ingredient information nom = models.CharField(max_length=50, verbose_name=_('name')) + def __str__( self ): + return self.nom class Allergy(models.Model): class Meta: verbose_name_plural = _('allergies') # Allergy information nom = models.CharField(max_length=50, verbose_name=_('name')) description = models.TextField(verbose_name=_('description')) ingredients = models.ManyToManyField( 'meal.Ingredient', related_name='related_allergies' ) + + def __str__( self ): + return self.nom
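The __str__ methods added above are what Django's admin and shell print for each object (note the commit's def __str__( self ) spacing; PEP 8 would write def __str__(self)). The pattern in isolation:

class Ingredient:
    def __init__(self, nom):
        self.nom = nom

    def __str__(self):
        # Used by str(), print(), and Django admin object lists alike.
        return self.nom

print(Ingredient("basilic"))  # -> basilic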
ec2d3feff6a1677457dfeb5b948b2013bc03df2a
classes/admin.py
classes/admin.py
from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') # fieldsets = ( # ("Attendee", {'fields': ('name'),}), # ("Start Date Time", {"fields": ('start_date_time'),}), # ("Stop Date Time", {"fields": ('stop_date_time'),}), # ('Notes', {'fields': ('notes'),}), # ) class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): pass admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin)
from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') class SessionInline(admin.TabularInline): model = Session extra = 1 fields = ('start_date_time', 'stop_date_time', 'teacher') class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): inlines = [ SessionInline, ] admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin)
Add sessions inline to classes
Add sessions inline to classes
Python
mit
thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee
from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') - # fieldsets = ( - # ("Attendee", {'fields': ('name'),}), - # ("Start Date Time", {"fields": ('start_date_time'),}), - # ("Stop Date Time", {"fields": ('stop_date_time'),}), - # ('Notes', {'fields': ('notes'),}), - # ) + + + class SessionInline(admin.TabularInline): + model = Session + extra = 1 + fields = ('start_date_time', 'stop_date_time', 'teacher') class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): - pass + inlines = [ + SessionInline, + ] admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin)
Add sessions inline to classes
## Code Before: from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') # fieldsets = ( # ("Attendee", {'fields': ('name'),}), # ("Start Date Time", {"fields": ('start_date_time'),}), # ("Stop Date Time", {"fields": ('stop_date_time'),}), # ('Notes', {'fields': ('notes'),}), # ) class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): pass admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin) ## Instruction: Add sessions inline to classes ## Code After: from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') class SessionInline(admin.TabularInline): model = Session extra = 1 fields = ('start_date_time', 'stop_date_time', 'teacher') class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): inlines = [ SessionInline, ] admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin)
from django.contrib import admin from classes.models import Attendee from classes.models import Attendance from classes.models import Session from classes.models import WalkinClass class AttendanceInline(admin.TabularInline): model = Attendance extra = 1 verbose_name = 'Attendee' verbose_name_plural = 'Attendees' fields = ('attendee', 'start_date_time', "stop_date_time", 'notes') - # fieldsets = ( - # ("Attendee", {'fields': ('name'),}), - # ("Start Date Time", {"fields": ('start_date_time'),}), - # ("Stop Date Time", {"fields": ('stop_date_time'),}), - # ('Notes', {'fields': ('notes'),}), - # ) + + + class SessionInline(admin.TabularInline): + model = Session + extra = 1 + fields = ('start_date_time', 'stop_date_time', 'teacher') class AttendeeAdmin(admin.ModelAdmin): pass class SessionAdmin(admin.ModelAdmin): inlines = [ AttendanceInline, ] fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", ) list_display= ('walk_in_class', 'start_date_time',) class WalkinClassAdmin(admin.ModelAdmin): - pass + inlines = [ + SessionInline, + ] admin.site.register(Attendee, AttendeeAdmin) admin.site.register(Session, SessionAdmin) admin.site.register(WalkinClass, WalkinClassAdmin)
b33c1b70bcb7a5303c1731cb6699466610ee54af
pyedgar/__init__.py
pyedgar/__init__.py
__title__ = 'pyedgar' __version__ = '0.0.3a1' __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' # Include top level modules from . import filing from . import downloader # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
__title__ = 'pyedgar' __version__ = '0.0.4a1' __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
Remove top level imports to avoid cyclical import
Remove top level imports to avoid cyclical import
Python
mit
gaulinmp/pyedgar
__title__ = 'pyedgar' - __version__ = '0.0.3a1' + __version__ = '0.0.4a1' __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' - - # Include top level modules - from . import filing - from . import downloader # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
Remove top level imports to avoid cyclical import
## Code Before: __title__ = 'pyedgar' __version__ = '0.0.3a1' __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' # Include top level modules from . import filing from . import downloader # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound] ## Instruction: Remove top level imports to avoid cyclical import ## Code After: __title__ = 'pyedgar' __version__ = '0.0.4a1' __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
__title__ = 'pyedgar' - __version__ = '0.0.3a1' ? ^ + __version__ = '0.0.4a1' ? ^ __author__ = 'Mac Gaulin' __license__ = 'MIT' __copyright__ = 'Copyright 2018 Mac Gaulin' - - # Include top level modules - from . import filing - from . import downloader # Include sub-modules from . import utilities from . import exceptions from .exceptions import (InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound) # __all__ = [edgarweb, forms, localstore, plaintext, #downloader, # InputTypeError, WrongFormType, NoFormTypeFound, NoCIKFound]
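The commit above breaks an import cycle by dropping the eager filing and downloader imports from the package __init__. When a submodule must remain reachable through the package, one common alternative is a deferred import at call time. A two-file sketch with hypothetical module names (a cycle cannot be shown in a single snippet):

# pkg/__init__.py: imports nothing that imports pkg back, so no cycle.
def open_filing(path):
    # By the time a caller invokes this, pkg is fully initialised,
    # so pkg.filing may safely import pkg in its own header.
    from pkg import filing
    return filing.Filing(path)

# pkg/filing.py
import pkg  # safe now that pkg/__init__.py no longer imports this module

class Filing:
    def __init__(self, path):
        self.path = path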
084a923d928996022936e5c942e69876dc409b5e
edx_data_research/cli/commands.py
edx_data_research/cli/commands.py
from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate()
from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate() def cmd_parse_course_tracking(args): edx_obj = parsing.CourseTracking(args) edx_obj.migrate()
Define proxy function for course specific tracking logs
Define proxy function for course specific tracking logs
Python
mit
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate() + def cmd_parse_course_tracking(args): + edx_obj = parsing.CourseTracking(args) + edx_obj.migrate() +
Define proxy function for course specific tracking logs
## Code Before: from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate() ## Instruction: Define proxy function for course specific tracking logs ## Code After: from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate() def cmd_parse_course_tracking(args): edx_obj = parsing.CourseTracking(args) edx_obj.migrate()
from edx_data_research import parsing from edx_data_research import reporting def cmd_report_basic(args): """ Run basic analytics """ edx_obj = reporting.Basic(args) getattr(edx_obj, args.basic.replace('-', '_'))() def cmd_report_problem_ids(args): edx_obj = reporting.ProblemIds(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_report_stats(args): edx_obj = reporting.Stats(args) getattr(edx_obj, args.report.replace('-', '_'))() def cmd_parse_sql(args): edx_obj = parsing.SQL(args) edx_obj.migrate() def cmd_parse_forum(args): edx_obj = parsing.Forum(args) edx_obj.migrate() def cmd_parse_problem_ids(args): edx_obj = parsing.ProblemIds(args) edx_obj.migrate() def cmd_parse_course_structure(args): edx_obj = parsing.CourseStructure(args) edx_obj.migrate() def cmd_parse_tracking(args): edx_obj = parsing.Tracking(args) edx_obj.migrate() + + def cmd_parse_course_tracking(args): + edx_obj = parsing.CourseTracking(args) + edx_obj.migrate()
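The proxy functions above all dispatch the same way: a dashed CLI name such as course-structure becomes a method name via replace('-', '_') and getattr. That pattern on its own, with a toy class:

class Reports:
    def problem_ids(self):
        return "running problem-ids report"

def dispatch(obj, cli_name):
    # Map the dashed CLI spelling onto the underscored method name and
    # call it; getattr raises AttributeError for unknown report names.
    return getattr(obj, cli_name.replace('-', '_'))()

print(dispatch(Reports(), 'problem-ids'))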
4d46001296ad083df6827a9c97333f0f093f31bd
example/config.py
example/config.py
vars['blogname'] = 'Example Blog' class Entry: def get_organization(self): return self.m.get('Organization')
locals['blogname'] = 'Example Blog' locals['base'] = 'http://example.invalid' class Entry: def get_organization(self): return self.m.get('Organization')
Document new evil magic, and add required var.
Document new evil magic, and add required var.
Python
isc
decklin/ennepe
- vars['blogname'] = 'Example Blog' + locals['blogname'] = 'Example Blog' + locals['base'] = 'http://example.invalid' class Entry: def get_organization(self): return self.m.get('Organization')
Document new evil magic, and add required var.
## Code Before: vars['blogname'] = 'Example Blog' class Entry: def get_organization(self): return self.m.get('Organization') ## Instruction: Document new evil magic, and add required var. ## Code After: locals['blogname'] = 'Example Blog' locals['base'] = 'http://example.invalid' class Entry: def get_organization(self): return self.m.get('Organization')
- vars['blogname'] = 'Example Blog' ? ^ ^ + locals['blogname'] = 'Example Blog' ? ^^^ ^ + locals['base'] = 'http://example.invalid' class Entry: def get_organization(self): return self.m.get('Organization')
d1ccd3e93043d11a22e873e7ccdb76d749746151
api/app/app.py
api/app/app.py
import os import logging from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint)
import os import logging from uwsgidecorators import postfork from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint) @postfork def refresh_db(): db.session.remove() db.init_app(app)
Refresh db connections on uwsgi fork
Refresh db connections on uwsgi fork
Python
mit
hexa4313/velov-companion-server,hexa4313/velov-companion-server
import os import logging + from uwsgidecorators import postfork from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint) + + @postfork + def refresh_db(): + db.session.remove() + db.init_app(app) +
Refresh db connections on uwsgi fork
## Code Before: import os import logging from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint) ## Instruction: Refresh db connections on uwsgi fork ## Code After: import os import logging from uwsgidecorators import postfork from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint) @postfork def refresh_db(): db.session.remove() db.init_app(app)
import os import logging + from uwsgidecorators import postfork from flask import Flask from model.base import db from route.base import blueprint # Register models and routes import model import route logging.basicConfig(level=logging.INFO) app = Flask(__name__) # app.config['PROPAGATE_EXCEPTIONS'] = True app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://' +\ os.environ['USER'] + ':' +\ os.environ['PASSWORD'] + '@' +\ 'db/' + os.environ['SCHEMA'] db.init_app(app) with app.test_request_context(): db.create_all() db.session.commit() app.register_blueprint(blueprint) + + + @postfork + def refresh_db(): + db.session.remove() + db.init_app(app)
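The @postfork hook in the record above exists because uWSGI imports the app once in the master process and then forks workers: database sockets opened before the fork would be shared, and corrupted, across children. Roughly the same idea with plain SQLAlchemy is sketched below; the connection URL is illustrative and assumes a Postgres driver is installed:

import os
from sqlalchemy import create_engine

engine = create_engine("postgresql://user:password@db/schema")

def on_worker_start():
    # Run once in each forked worker: discard pooled connections
    # inherited from the parent so the child opens its own sockets.
    engine.dispose()
    print("worker %d got a fresh pool" % os.getpid())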
e6b24f6e8bfca6f8e22bd63c893a228cc2a694f1
starter_project/normalize_breton_test.py
starter_project/normalize_breton_test.py
import unittest class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') if __name__ == '__main__': unittest.main()
import unittest import normalize_breton_lib class TestStringMethods(unittest.TestCase): def test_normalize_breton(self): 'Test the output of NormalizeBreton.' test_cases = [(('a--bc', 'a-bc'), ('ccb--a', 'ccb-a'), ('ba--aa', 'ba-aa'))] for test in test_cases: for test_case, expected in test: test_fst = normalize_breton_lib.NormalizeBreton(test_case) self.assertEqual(test_fst, expected) if __name__ == '__main__': unittest.main()
Add basic test for example Pynini FST.
Add basic test for example Pynini FST.
Python
apache-2.0
googleinterns/text-norm-for-low-resource-languages,googleinterns/text-norm-for-low-resource-languages
import unittest + import normalize_breton_lib class TestStringMethods(unittest.TestCase): - def test_upper(self): - self.assertEqual('foo'.upper(), 'FOO') + def test_normalize_breton(self): + 'Test the output of NormalizeBreton.' + test_cases = [(('a--bc', 'a-bc'), ('ccb--a', 'ccb-a'), ('ba--aa', 'ba-aa'))] + for test in test_cases: + for test_case, expected in test: + test_fst = normalize_breton_lib.NormalizeBreton(test_case) + self.assertEqual(test_fst, expected) if __name__ == '__main__': unittest.main()
Add basic test for example Pynini FST.
## Code Before: import unittest class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') if __name__ == '__main__': unittest.main() ## Instruction: Add basic test for example Pynini FST. ## Code After: import unittest import normalize_breton_lib class TestStringMethods(unittest.TestCase): def test_normalize_breton(self): 'Test the output of NormalizeBreton.' test_cases = [(('a--bc', 'a-bc'), ('ccb--a', 'ccb-a'), ('ba--aa', 'ba-aa'))] for test in test_cases: for test_case, expected in test: test_fst = normalize_breton_lib.NormalizeBreton(test_case) self.assertEqual(test_fst, expected) if __name__ == '__main__': unittest.main()
import unittest + import normalize_breton_lib class TestStringMethods(unittest.TestCase): - def test_upper(self): - self.assertEqual('foo'.upper(), 'FOO') + def test_normalize_breton(self): + 'Test the output of NormalizeBreton.' + test_cases = [(('a--bc', 'a-bc'), ('ccb--a', 'ccb-a'), ('ba--aa', 'ba-aa'))] + for test in test_cases: + for test_case, expected in test: + test_fst = normalize_breton_lib.NormalizeBreton(test_case) + self.assertEqual(test_fst, expected) if __name__ == '__main__': unittest.main()
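The loop in the test above runs every case inside one test method, so the first mismatch hides the rest. unittest's subTest (Python 3.4+) reports each pair independently; a sketch with a trivial stand-in for the FST call:

import unittest

def normalize(text):
    return text.replace('--', '-')  # stand-in for the real Pynini FST

class TestNormalize(unittest.TestCase):
    def test_cases(self):
        cases = [('a--bc', 'a-bc'), ('ccb--a', 'ccb-a'), ('ba--aa', 'ba-aa')]
        for raw, expected in cases:
            with self.subTest(raw=raw):
                # Each failing pair is reported on its own instead of
                # aborting the whole method at the first assertEqual.
                self.assertEqual(normalize(raw), expected)

if __name__ == '__main__':
    unittest.main()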
db981f7616283992fd1d17a3b1bf7d300b8ee34f
proper_parens.py
proper_parens.py
from __future__ import unicode_literals <<<<<<< HEAD def check_statement1(value): output = 0 while output >= 0: for item in value: if item == ")": output -= 1 if output == -1: return -1 elif item == "(": output += 1 if output == 0: return 0 elif output > 1: return 1 ======= def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1 >>>>>>> 74dee1d09fdc09f93af3d15286336d7face4ba08
from __future__ import unicode_literals def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1
Fix proper parens merge conflict
Fix proper parens merge conflict
Python
mit
constanthatz/data-structures
from __future__ import unicode_literals - <<<<<<< HEAD - def check_statement1(value): - output = 0 - while output >= 0: - for item in value: - if item == ")": - output -= 1 - if output == -1: - return -1 - elif item == "(": - output += 1 - if output == 0: - return 0 - elif output > 1: - return 1 - ======= def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1 - >>>>>>> 74dee1d09fdc09f93af3d15286336d7face4ba08
Fix proper parens merge conflict
## Code Before: from __future__ import unicode_literals <<<<<<< HEAD def check_statement1(value): output = 0 while output >= 0: for item in value: if item == ")": output -= 1 if output == -1: return -1 elif item == "(": output += 1 if output == 0: return 0 elif output > 1: return 1 ======= def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1 >>>>>>> 74dee1d09fdc09f93af3d15286336d7face4ba08 ## Instruction: Fix proper parens merge conflict ## Code After: from __future__ import unicode_literals def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1
from __future__ import unicode_literals - <<<<<<< HEAD - def check_statement1(value): - output = 0 - while output >= 0: - for item in value: - if item == ")": - output -= 1 - if output == -1: - return -1 - elif item == "(": - output += 1 - if output == 0: - return 0 - elif output > 1: - return 1 - ======= def check_statement(value): ''' Return 1, 0, or -1 if input is open, balanced, or broken. ''' output = 0 index = 0 while index < len(value) and output >= 0: # If the count is ever < 0, statement must be -1 (broken), end loop # If the index is out of range, end loop if value[index] == ")": # Subtract 1 for every close paren output -= 1 elif value[index] == "(": # Add 1 for every close paren output += 1 index += 1 if output == -1: # Check if output is -1 (broken) return output elif not output: # Check if output is 0 (balanced) return output else: # Must be 1 (open) if it makes it to here return 1 - >>>>>>> 74dee1d09fdc09f93af3d15286336d7face4ba08
4e8c84bf36250d7e61b585fc5db545206cab9730
perfkitbenchmarker/scripts/spark_table.py
perfkitbenchmarker/scripts/spark_table.py
from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table spark.sql('drop table if exists ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession from pyspark.sql.utils import AnalysisException def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table spark.sql('DROP TABLE IF EXISTS ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') try: # This loads the partitions under the table if table is partitioned. spark.sql('MSCK REPAIR TABLE ' + table) except AnalysisException: # The table was not partitioned, which was presumably expected pass if __name__ == '__main__': main()
Support creating Hive tables with partitioned data.
Support creating Hive tables with partitioned data. PiperOrigin-RevId: 335539022
Python
apache-2.0
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession + from pyspark.sql.utils import AnalysisException def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table - spark.sql('drop table if exists ' + table) + spark.sql('DROP TABLE IF EXISTS ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') - + try: + # This loads the partitions under the table if table is partitioned. + spark.sql('MSCK REPAIR TABLE ' + table) + except AnalysisException: + # The table was not partitioned, which was presumably expected + pass if __name__ == '__main__': main()
Support creating Hive tables with partitioned data.
## Code Before: from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table spark.sql('drop table if exists ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') if __name__ == '__main__': main() ## Instruction: Support creating Hive tables with partitioned data. ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession from pyspark.sql.utils import AnalysisException def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table spark.sql('DROP TABLE IF EXISTS ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') try: # This loads the partitions under the table if table is partitioned. spark.sql('MSCK REPAIR TABLE ' + table) except AnalysisException: # The table was not partitioned, which was presumably expected pass if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function import argparse import logging import os from pyspark.sql import SparkSession + from pyspark.sql.utils import AnalysisException def main(): parser = argparse.ArgumentParser() parser.add_argument('root_dir') parser.add_argument('tables', type=lambda csv: csv.split(',')) args = parser.parse_args() spark = (SparkSession.builder .appName('Setup Spark tables') .enableHiveSupport() .getOrCreate()) for table in args.tables: logging.info('Creating table %s', table) table_dir = os.path.join(args.root_dir, table) # clean up previous table - spark.sql('drop table if exists ' + table) + spark.sql('DROP TABLE IF EXISTS ' + table) # register new table spark.catalog.createTable(table, table_dir, source='parquet') - + try: + # This loads the partitions under the table if table is partitioned. + spark.sql('MSCK REPAIR TABLE ' + table) + except AnalysisException: + # The table was not partitioned, which was presumably expected + pass if __name__ == '__main__': main()
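The `MSCK REPAIR TABLE` step is what makes Hive-partitioned layouts visible: a table registered over partition directories answers queries with zero rows until the partitions are discovered. A sketch of the difference, assuming a made-up directory layout and the `spark` session created by the script:

```python
# Hypothetical Hive-style partitioned layout under the table directory:
#   /data/store_sales/ss_sold_date_sk=2450816/part-00000.parquet
#   /data/store_sales/ss_sold_date_sk=2450817/part-00000.parquet
spark.catalog.createTable('store_sales', '/data/store_sales', source='parquet')
spark.sql('SELECT COUNT(*) FROM store_sales').show()  # 0: no partitions registered yet
spark.sql('MSCK REPAIR TABLE store_sales')            # discovers both partition directories
spark.sql('SELECT COUNT(*) FROM store_sales').show()  # now counts the actual rows
```

For an unpartitioned directory the same statement raises `AnalysisException`, which is exactly what the `try/except` in the script swallows.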
e0b2ce4b0287e8321cddde6c658a833dcf147974
features.py
features.py
import numpy as np def mean_energy(x_blocks): return np.sqrt(np.mean(x_blocks**2, axis=1)) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1)
import numpy as np from numpy.linalg import norm def mean_power(x_blocks): return np.sqrt(np.mean(x_blocks**2, axis=-1)) def power(x_blocks): return np.sqrt(np.sum(x_blocks**2, axis=-1)) def mean_energy(x_blocks): return np.mean(x_blocks**2, axis=-1) def energy(x_blocks): return np.sum(x_blocks**2, axis=-1) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1)
Add computation on energy and power (mean and total).
Add computation on energy and power (mean and total).
Python
mit
bzamecnik/tfr,bzamecnik/tfr
import numpy as np + from numpy.linalg import norm + + def mean_power(x_blocks): + return np.sqrt(np.mean(x_blocks**2, axis=-1)) + + def power(x_blocks): + return np.sqrt(np.sum(x_blocks**2, axis=-1)) def mean_energy(x_blocks): - return np.sqrt(np.mean(x_blocks**2, axis=1)) + return np.mean(x_blocks**2, axis=-1) + + def energy(x_blocks): + return np.sum(x_blocks**2, axis=-1) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) - y = mean_energy(blocks) + y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1)
Add computation on energy and power (mean and total).
## Code Before: import numpy as np def mean_energy(x_blocks): return np.sqrt(np.mean(x_blocks**2, axis=1)) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1) ## Instruction: Add computation on energy and power (mean and total). ## Code After: import numpy as np from numpy.linalg import norm def mean_power(x_blocks): return np.sqrt(np.mean(x_blocks**2, axis=-1)) def power(x_blocks): return np.sqrt(np.sum(x_blocks**2, axis=-1)) def mean_energy(x_blocks): return np.mean(x_blocks**2, axis=-1) def energy(x_blocks): return np.sum(x_blocks**2, axis=-1) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1)
import numpy as np + from numpy.linalg import norm + + def mean_power(x_blocks): + return np.sqrt(np.mean(x_blocks**2, axis=-1)) + + def power(x_blocks): + return np.sqrt(np.sum(x_blocks**2, axis=-1)) def mean_energy(x_blocks): - return np.sqrt(np.mean(x_blocks**2, axis=1)) ? -------- - + return np.mean(x_blocks**2, axis=-1) ? + + + def energy(x_blocks): + return np.sum(x_blocks**2, axis=-1) if __name__ == '__main__': import matplotlib.pyplot as plt from files import load_wav from analysis import split_to_blocks def analyze_mean_energy(file, block_size=1024): x, fs = load_wav(file) blocks, t = split_to_blocks(x, block_size) - y = mean_energy(blocks) ? ---- + y = mean_energy(blocks) plt.semilogy(t, y) plt.ylim(0, 1)
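The four block features are related: `power` is the square root of `energy`, and `mean_power` is the RMS of each block. A toy check (my own numbers):

```python
import numpy as np

blocks = np.array([[3.0, 4.0]])      # one block of two samples
print(energy(blocks))                # [25.]   = 3**2 + 4**2
print(power(blocks))                 # [5.]    = sqrt(25)
print(mean_energy(blocks))           # [12.5]  = 25 / 2
print(mean_power(blocks))            # [3.5355...] = sqrt(12.5), the block RMS
```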
7fb284ad29098a4397c7ac953e2d9acb89cf089e
notification/backends/email.py
notification/backends/email.py
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): sensitivity = 2 slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed.
Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed.
Python
mit
theatlantic/django-notification,theatlantic/django-notification
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): + sensitivity = 2 slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed.
## Code Before: from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True ## Instruction: Set sensitivity of e-mail backend to 2, so notifications with 1 aren't mailed. ## Code After: from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): sensitivity = 2 slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
from django.conf import settings from django.core.mail import EmailMessage from notification.backends.base import NotificationBackend class EmailBackend(NotificationBackend): + sensitivity = 2 slug = u'email' display_name = u'E-mail' formats = ['short.txt', 'full.txt'] def email_for_user(self, recipient): return recipient.email def should_send(self, sender, recipient, notice_type, *args, **kwargs): send = super(EmailBackend, self).should_send(sender, recipient, notice_type) return send and self.email_for_user(recipient) != '' def render_subject(self, label, context): # Strip newlines from subject return ''.join(self.render_message(label, 'notification/email_subject.txt', 'short.txt', context ).splitlines()) def send(self, sender, recipient, notice_type, context, *args, **kwargs): if not self.should_send(sender, recipient, notice_type): return False headers = kwargs.get('headers', {}) headers.setdefault('Reply-To', settings.DEFAULT_FROM_EMAIL) EmailMessage(self.render_subject(notice_type.label, context), self.render_message(notice_type.label, 'notification/email_body.txt', 'full.txt', context), kwargs.get('from_email') or settings.DEFAULT_FROM_EMAIL, [self.email_for_user(recipient)], headers=headers).send() return True
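Keeping the address lookup in `email_for_user` makes alternate sources easy to plug in; a hypothetical subclass (the `work_email` field is invented for illustration):

```python
class ProfileEmailBackend(EmailBackend):
    """Prefer a profile address, falling back to user.email."""
    slug = u'profile-email'
    display_name = u'Profile e-mail'

    def email_for_user(self, recipient):
        return getattr(recipient, 'work_email', '') or recipient.email
```

`should_send` already refuses recipients whose resolved address is empty, so the subclass needs no extra guard.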
b6208c1f9b6f0afca1dff40a66d2c915594b1946
blaze/io/server/tests/start_simple_server.py
blaze/io/server/tests/start_simple_server.py
import sys, os import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False)
import sys, os if os.name == 'nt': old_excepthook = sys.excepthook # Exclude this from our autogenerated API docs. undoc = lambda func: func @undoc def gui_excepthook(exctype, value, tb): try: import ctypes, traceback MB_ICONERROR = 0x00000010 title = u'Error starting test Blaze server' msg = u''.join(traceback.format_exception(exctype, value, tb)) ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR) finally: # Also call the old exception hook to let it do # its thing too. old_excepthook(exctype, value, tb) sys.excepthook = gui_excepthook import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False)
Add exception hook to help diagnose server test errors in python3 gui mode
Add exception hook to help diagnose server test errors in python3 gui mode
Python
bsd-3-clause
caseyclements/blaze,dwillmer/blaze,jcrist/blaze,mrocklin/blaze,cowlicks/blaze,ContinuumIO/blaze,jcrist/blaze,xlhtc007/blaze,FrancescAlted/blaze,aterrel/blaze,nkhuyu/blaze,AbhiAgarwal/blaze,markflorisson/blaze-core,nkhuyu/blaze,FrancescAlted/blaze,ChinaQuants/blaze,markflorisson/blaze-core,alexmojaki/blaze,alexmojaki/blaze,jdmcbr/blaze,mrocklin/blaze,AbhiAgarwal/blaze,maxalbert/blaze,dwillmer/blaze,markflorisson/blaze-core,AbhiAgarwal/blaze,scls19fr/blaze,LiaoPan/blaze,scls19fr/blaze,mwiebe/blaze,aterrel/blaze,xlhtc007/blaze,aterrel/blaze,ContinuumIO/blaze,AbhiAgarwal/blaze,ChinaQuants/blaze,jdmcbr/blaze,cpcloud/blaze,LiaoPan/blaze,maxalbert/blaze,cpcloud/blaze,cowlicks/blaze,mwiebe/blaze,mwiebe/blaze,markflorisson/blaze-core,FrancescAlted/blaze,caseyclements/blaze,mwiebe/blaze,FrancescAlted/blaze
import sys, os + + if os.name == 'nt': + old_excepthook = sys.excepthook + + # Exclude this from our autogenerated API docs. + undoc = lambda func: func + + @undoc + def gui_excepthook(exctype, value, tb): + try: + import ctypes, traceback + MB_ICONERROR = 0x00000010 + title = u'Error starting test Blaze server' + msg = u''.join(traceback.format_exception(exctype, value, tb)) + ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR) + finally: + # Also call the old exception hook to let it do + # its thing too. + old_excepthook(exctype, value, tb) + + sys.excepthook = gui_excepthook + import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False)
Add exception hook to help diagnose server test errors in python3 gui mode
## Code Before: import sys, os import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False) ## Instruction: Add exception hook to help diagnose server test errors in python3 gui mode ## Code After: import sys, os if os.name == 'nt': old_excepthook = sys.excepthook # Exclude this from our autogenerated API docs. undoc = lambda func: func @undoc def gui_excepthook(exctype, value, tb): try: import ctypes, traceback MB_ICONERROR = 0x00000010 title = u'Error starting test Blaze server' msg = u''.join(traceback.format_exception(exctype, value, tb)) ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR) finally: # Also call the old exception hook to let it do # its thing too. old_excepthook(exctype, value, tb) sys.excepthook = gui_excepthook import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False)
import sys, os + + if os.name == 'nt': + old_excepthook = sys.excepthook + + # Exclude this from our autogenerated API docs. + undoc = lambda func: func + + @undoc + def gui_excepthook(exctype, value, tb): + try: + import ctypes, traceback + MB_ICONERROR = 0x00000010 + title = u'Error starting test Blaze server' + msg = u''.join(traceback.format_exception(exctype, value, tb)) + ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR) + finally: + # Also call the old exception hook to let it do + # its thing too. + old_excepthook(exctype, value, tb) + + sys.excepthook = gui_excepthook + import blaze from blaze.io.server.app import app blaze.catalog.load_config(sys.argv[1]) app.run(port=int(sys.argv[2]), use_reloader=False)
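The hook matters because a GUI-mode interpreter (`pythonw.exe`) has no console, so a test server that dies at startup would otherwise fail silently. The same idea as a reusable helper, a sketch rather than code from the commit:

```python
import os
import sys

def install_gui_excepthook(title):
    """On Windows, surface unhandled exceptions in a message box."""
    if os.name != 'nt':
        return
    previous = sys.excepthook

    def hook(exctype, value, tb):
        try:
            import ctypes, traceback
            msg = u''.join(traceback.format_exception(exctype, value, tb))
            ctypes.windll.user32.MessageBoxW(0, msg, title, 0x00000010)  # MB_ICONERROR
        finally:
            previous(exctype, value, tb)  # keep normal stderr reporting too

    sys.excepthook = hook
```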
d18ff30bbddde5049ffbe23bce19288c3c47e41b
posts/views.py
posts/views.py
from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' class PostDetailView(DetailView): model = Post context_object_name = 'post'
from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' def get_queryset(self): """ Order posts by the day they were added, from newest, to oldest. """ queryset = super(PostListView, self).get_queryset() return queryset.order_by('-added_at') class PostDetailView(DetailView): model = Post context_object_name = 'post'
Order posts from newest to oldest
posts: Order posts from newest to oldest
Python
mit
rtrembecky/roots,tbabej/roots,rtrembecky/roots,tbabej/roots,matus-stehlik/roots,matus-stehlik/roots,matus-stehlik/glowing-batman,matus-stehlik/roots,matus-stehlik/glowing-batman,rtrembecky/roots,tbabej/roots
from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' + def get_queryset(self): + """ + Order posts by the day they were added, from newest, to oldest. + """ + + queryset = super(PostListView, self).get_queryset() + return queryset.order_by('-added_at') + class PostDetailView(DetailView): model = Post context_object_name = 'post'
Order posts from newest to oldest
## Code Before: from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' class PostDetailView(DetailView): model = Post context_object_name = 'post' ## Instruction: Order posts from newest to oldest ## Code After: from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' def get_queryset(self): """ Order posts by the day they were added, from newest, to oldest. """ queryset = super(PostListView, self).get_queryset() return queryset.order_by('-added_at') class PostDetailView(DetailView): model = Post context_object_name = 'post'
from django.views.generic.list import ListView from django.views.generic.detail import DetailView from .models import Post class PostListView(ListView): model = Post context_object_name = 'posts' + def get_queryset(self): + """ + Order posts by the day they were added, from newest, to oldest. + """ + + queryset = super(PostListView, self).get_queryset() + return queryset.order_by('-added_at') + class PostDetailView(DetailView): model = Post context_object_name = 'post'
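For a fixed sort like this, Django's generic views also accept the ordering declaratively; on Django 1.6 and later, where `MultipleObjectMixin.ordering` exists, the override is equivalent to:

```python
class PostListView(ListView):
    model = Post
    context_object_name = 'posts'
    ordering = ['-added_at']  # newest first, same effect as the get_queryset override
```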
d2456f280fd1d1bff44475b870bf067d2694fc9d
chainerrl/functions/arctanh.py
chainerrl/functions/arctanh.py
from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): type_check._argname(in_types, ('x',)) x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0]
from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): if hasattr(type_check, '_argname'): # typecheck._argname is introduced by Chainer v6 type_check._argname(in_types, ('x',)) x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0]
Fix chainer v4 error about type_check._argname
Fix chainer v4 error about type_check._argname
Python
mit
toslunar/chainerrl,toslunar/chainerrl
from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): + if hasattr(type_check, '_argname'): + # typecheck._argname is introduced by Chainer v6 - type_check._argname(in_types, ('x',)) + type_check._argname(in_types, ('x',)) x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0]
Fix chainer v4 error about type_check._argname
## Code Before: from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): type_check._argname(in_types, ('x',)) x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0] ## Instruction: Fix chainer v4 error about type_check._argname ## Code After: from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): if hasattr(type_check, '_argname'): # typecheck._argname is introduced by Chainer v6 type_check._argname(in_types, ('x',)) x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0]
from chainer.backends import cuda from chainer import function_node from chainer import utils from chainer.utils import type_check class Arctanh(function_node.FunctionNode): """Elementwise inverse hyperbolic tangent function.""" def check_type_forward(self, in_types): + if hasattr(type_check, '_argname'): + # typecheck._argname is introduced by Chainer v6 - type_check._argname(in_types, ('x',)) + type_check._argname(in_types, ('x',)) ? ++++ x_type, = in_types type_check.expect(x_type.dtype.kind == 'f') def forward(self, inputs): self.retain_inputs((0,)) x, = inputs xp = cuda.get_array_module(x) y = xp.arctanh(x) return utils.force_array(y, dtype=x.dtype), def backward(self, indexes, grad_outputs): x, = self.get_retained_inputs() gy, = grad_outputs gx = 1. / (1 - x ** 2) * gy return gx, def arctanh(x): """Elementwise inverse hyperbolic tangent function. Args: x (:class:`~chainer.Variable` or :ref:`ndarray`): Input variable. Returns: ~chainer.Variable: Output variable. """ return Arctanh().apply((x,))[0]
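A quick numerical check of the forward/backward pair (inputs and tolerances are mine):

```python
import chainer
import numpy as np

x = chainer.Variable(np.array([0.0, 0.5, -0.9], dtype=np.float32))
y = arctanh(x)
np.testing.assert_allclose(y.data, np.arctanh(x.data), rtol=1e-6)

y.grad = np.ones_like(x.data)
y.backward()
# the analytic gradient of arctanh is 1 / (1 - x**2)
np.testing.assert_allclose(x.grad, 1. / (1. - x.data ** 2), rtol=1e-5)
```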
1303d1c14f7c3127b8fc87178f268d8b052ef503
setup.py
setup.py
from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', 'cyclone (>=1.0)', 'simplejson' ], install_requires=[ 'twisted>=12.0', 'cyclone>=1.0-rc8', 'simplejson' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) )
from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', 'cyclone (>=1.0)' ], install_requires=[ 'twisted>=12.0', 'cyclone>=1.0-rc8' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) )
Remove declaration of dependency on simplejson
Remove declaration of dependency on simplejson
Python
mit
flaviogrossi/sockjs-cyclone
from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', - 'cyclone (>=1.0)', + 'cyclone (>=1.0)' - 'simplejson' ], install_requires=[ 'twisted>=12.0', - 'cyclone>=1.0-rc8', + 'cyclone>=1.0-rc8' - 'simplejson' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) )
Remove declaration of dependency on simplejson
## Code Before: from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', 'cyclone (>=1.0)', 'simplejson' ], install_requires=[ 'twisted>=12.0', 'cyclone>=1.0-rc8', 'simplejson' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) ) ## Instruction: Remove declaration of dependency on simplejson ## Code After: from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', 'cyclone (>=1.0)' ], install_requires=[ 'twisted>=12.0', 'cyclone>=1.0-rc8' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) )
from distutils.core import setup def readfile(fname): with open(fname) as f: content = f.read() return content setup(name='sockjs-cyclone', version='1.0.2', author='Flavio Grossi', author_email='[email protected]', description='SockJS python server for the Cyclone Web Server', license=readfile('LICENSE'), long_description=readfile('README.rst'), keywords=[ 'sockjs', 'cyclone', 'web server', 'websocket' ], url='http://github.com/flaviogrossi/sockjs-cyclone/', packages=[ 'sockjs', 'sockjs.cyclone', 'sockjs.cyclone.transports' ], requires=[ 'twisted (>=12.0)', - 'cyclone (>=1.0)', ? - + 'cyclone (>=1.0)' - 'simplejson' ], install_requires=[ 'twisted>=12.0', - 'cyclone>=1.0-rc8', ? - + 'cyclone>=1.0-rc8' - 'simplejson' ], classifiers=( 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Framework :: Twisted', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: HTTP Servers', 'Topic :: Software Development :: Libraries :: Python Modules', ) )
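A sanity check after the change (my own snippet; it assumes the package is installed into the current environment):

```python
import pkg_resources

dist = pkg_resources.get_distribution('sockjs-cyclone')
assert not any(req.project_name == 'simplejson' for req in dist.requires())
```

Any module-level `import simplejson` left in the codebase would still need grepping out separately.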
6d21aa9e6d52701d7beacd4ec519ab1bac488886
machina/models/abstract_models.py
machina/models/abstract_models.py
from __future__ import unicode_literals # Third party imports from django.db import models from django.utils.translation import ugettext_lazy as _ # Local application / specific library imports class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): super_self = super(ActiveManager, self) get_queryset = (super_self.get_query_set if hasattr(super_self, 'get_query_set') else super_self.get_queryset) return get_queryset().filter(is_active__exact=True) class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ An abstract base class model that provides a created and a updated fields to store creation date and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True
from __future__ import unicode_literals from django.db import models from django.utils.translation import ugettext_lazy as _ class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): return super(ActiveManager, self).get_queryset().filter(is_active__exact=True) class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ An abstract base class model that provides a created and a updated fields to store creation date and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True
Remove unnecessary compat code from ActiveManager
Remove unnecessary compat code from ActiveManager
Python
bsd-3-clause
franga2000/django-machina,franga2000/django-machina,reinbach/django-machina,reinbach/django-machina,ellmetha/django-machina,franga2000/django-machina,reinbach/django-machina,ellmetha/django-machina,ellmetha/django-machina
+ from __future__ import unicode_literals - # Third party imports from django.db import models from django.utils.translation import ugettext_lazy as _ - - # Local application / specific library imports class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): - super_self = super(ActiveManager, self) - get_queryset = (super_self.get_query_set - if hasattr(super_self, 'get_query_set') - else super_self.get_queryset) - - return get_queryset().filter(is_active__exact=True) + return super(ActiveManager, self).get_queryset().filter(is_active__exact=True) class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ - An abstract base class model that provides a created and a updated fields to store creation date and last updated date. + An abstract base class model that provides a created and a updated fields to store creation date + and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True
Remove unnecessary compat code from ActiveManager
## Code Before: from __future__ import unicode_literals # Third party imports from django.db import models from django.utils.translation import ugettext_lazy as _ # Local application / specific library imports class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): super_self = super(ActiveManager, self) get_queryset = (super_self.get_query_set if hasattr(super_self, 'get_query_set') else super_self.get_queryset) return get_queryset().filter(is_active__exact=True) class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ An abstract base class model that provides a created and a updated fields to store creation date and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True ## Instruction: Remove unnecessary compat code from ActiveManager ## Code After: from __future__ import unicode_literals from django.db import models from django.utils.translation import ugettext_lazy as _ class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): return super(ActiveManager, self).get_queryset().filter(is_active__exact=True) class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ An abstract base class model that provides a created and a updated fields to store creation date and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True
+ from __future__ import unicode_literals - # Third party imports from django.db import models from django.utils.translation import ugettext_lazy as _ - - # Local application / specific library imports class ActiveManager(models.Manager): """ Returns only active objects. """ def get_queryset(self): - super_self = super(ActiveManager, self) - get_queryset = (super_self.get_query_set - if hasattr(super_self, 'get_query_set') - else super_self.get_queryset) - - return get_queryset().filter(is_active__exact=True) + return super(ActiveManager, self).get_queryset().filter(is_active__exact=True) ? +++++++++++++++++++++++++++ class ActiveModel(models.Model): """ An abstract base class model that provides an is_active field and attach an ActiveManager. """ is_active = models.BooleanField(default=True, db_index=True) # Managers objects = models.Manager() active = ActiveManager() class Meta: abstract = True class DatedModel(models.Model): """ - An abstract base class model that provides a created and a updated fields to store creation date and last updated date. ? ----------------------- + An abstract base class model that provides a created and a updated fields to store creation date + and last updated date. """ created = models.DateTimeField(auto_now_add=True, verbose_name=_('Creation date')) updated = models.DateTimeField(auto_now=True, verbose_name=_('Update date')) class Meta: abstract = True
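A usage sketch for the two managers (the `Topic` model is hypothetical):

```python
class Topic(ActiveModel, DatedModel):
    title = models.CharField(max_length=255)

Topic.objects.create(title='visible')
Topic.objects.create(title='hidden', is_active=False)
assert Topic.objects.count() == 2   # default manager sees every row
assert Topic.active.count() == 1    # ActiveManager filters on is_active
```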
e5a94d2902a66d55be62b92e35ac90ac7aed7991
javascript/navigator/__init__.py
javascript/navigator/__init__.py
__author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation class Navigator(v8.JSClass): def __init__(self, runtime): # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. # if 'location' in runtime.manifest.get('capabilities', []): if True: self.geolocation = Geolocation(runtime)
__author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation from javascript.exceptions import JSRuntimeException class Navigator(v8.JSClass): def __init__(self, runtime): self._runtime = runtime # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. if 'location' in runtime.manifest.get('capabilities', []): self.geolocation = Geolocation(runtime) def __getattr__(self, item): # __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation # does not exist. # This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`), # but throw an informative exception if it's accessed. if item == 'geolocation': raise JSRuntimeException( self._runtime, "You must add 'location' to the appinfo.json capabilities array to access geolocation." ) else: raise AttributeError
Implement location restriction more thoroughly.
Implement location restriction more thoroughly.
Python
mit
youtux/pypkjs,pebble/pypkjs
__author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation - + from javascript.exceptions import JSRuntimeException class Navigator(v8.JSClass): def __init__(self, runtime): + self._runtime = runtime # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. - # if 'location' in runtime.manifest.get('capabilities', []): + if 'location' in runtime.manifest.get('capabilities', []): - if True: self.geolocation = Geolocation(runtime) + def __getattr__(self, item): + # __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation + # does not exist. + # This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`), + # but throw an informative exception if it's accessed. + if item == 'geolocation': + raise JSRuntimeException( + self._runtime, + "You must add 'location' to the appinfo.json capabilities array to access geolocation." + ) + else: + raise AttributeError + +
Implement location restriction more thoroughly.
## Code Before: __author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation class Navigator(v8.JSClass): def __init__(self, runtime): # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. # if 'location' in runtime.manifest.get('capabilities', []): if True: self.geolocation = Geolocation(runtime) ## Instruction: Implement location restriction more thoroughly. ## Code After: __author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation from javascript.exceptions import JSRuntimeException class Navigator(v8.JSClass): def __init__(self, runtime): self._runtime = runtime # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. if 'location' in runtime.manifest.get('capabilities', []): self.geolocation = Geolocation(runtime) def __getattr__(self, item): # __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation # does not exist. # This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`), # but throw an informative exception if it's accessed. if item == 'geolocation': raise JSRuntimeException( self._runtime, "You must add 'location' to the appinfo.json capabilities array to access geolocation." ) else: raise AttributeError
__author__ = 'katharine' import PyV8 as v8 from geolocation import Geolocation - + from javascript.exceptions import JSRuntimeException class Navigator(v8.JSClass): def __init__(self, runtime): + self._runtime = runtime # W3C spec says that if geolocation is disabled, navigator.geolocation should not exist. - # if 'location' in runtime.manifest.get('capabilities', []): ? -- + if 'location' in runtime.manifest.get('capabilities', []): - if True: self.geolocation = Geolocation(runtime) + + def __getattr__(self, item): + # __getattr__ is only called if something does not exist. Therefore, if it's called, geolocation + # does not exist. + # This approach lets us report it doesn't exist if tested for (e.g. `'geolocation' in navigator`), + # but throw an informative exception if it's accessed. + if item == 'geolocation': + raise JSRuntimeException( + self._runtime, + "You must add 'location' to the appinfo.json capabilities array to access geolocation." + ) + else: + raise AttributeError +
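The net effect, sketched with a stand-in `runtime` whose manifest lacks the `location` capability:

```python
nav = Navigator(runtime)
try:
    nav.geolocation
except JSRuntimeException as e:
    print(e)  # names the missing appinfo.json capability
```

Under Python 2 (the PyV8 era), `hasattr(nav, 'geolocation')` swallows the exception and returns `False`, so feature tests still report the attribute as absent while direct access stays informative.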