| column | type | min | max |
|---|---|---|---|
| commit | stringlengths | 40 | 40 |
| old_file | stringlengths | 4 | 118 |
| new_file | stringlengths | 4 | 118 |
| old_contents | stringlengths | 10 | 2.94k |
| new_contents | stringlengths | 21 | 3.18k |
| subject | stringlengths | 16 | 444 |
| message | stringlengths | 17 | 2.63k |
| lang | stringclasses | 1 value | |
| license | stringclasses | 13 values | |
| repos | stringlengths | 5 | 43k |
| ndiff | stringlengths | 52 | 3.32k |
| instruction | stringlengths | 16 | 444 |
| content | stringlengths | 133 | 4.32k |
| fuzzy_diff | stringlengths | 16 | 3.18k |
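The rows that follow repeat this schema, one field per record. As a rough illustration of how a dump with this schema can be consumed, here is a minimal sketch. It assumes the data is published as a Hugging Face dataset; the dataset ID below is a hypothetical placeholder, not taken from this file.

```python
# Minimal sketch, assuming the dump is published as a Hugging Face dataset.
# "user/commit-rewrite-corpus" is a hypothetical placeholder ID.
from datasets import load_dataset

ds = load_dataset("user/commit-rewrite-corpus", split="train")

for row in ds.select(range(3)):
    # Each record pairs a file's contents before and after one commit,
    # along with the commit metadata and several diff renderings.
    print(row["commit"], row["old_file"], "->", row["new_file"])
    print(row["subject"])
    print(row["ndiff"][:200])
```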
2a77f5e9a2bcce6b11c21f40574f73cad133c4b8
slack.py
slack.py
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token,
                  invitee_email=email):
            return json.dumps({'status': 'success'})
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}

import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token,
                  invitee_email=email):
            return {'status': 'success'}
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}

Fix return statement for `invite`
Fix return statement for `invite`
Python
mit
avinassh/slackipy,avinassh/slackipy,avinassh/slackipy
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token,
                  invitee_email=email):
-             return json.dumps({'status': 'success'})
+             return {'status': 'success'}
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}

Fix return statement for `invite`
## Code Before:
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token,
                  invitee_email=email):
            return json.dumps({'status': 'success'})
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}

## Instruction:
Fix return statement for `invite`

## Code After:
import json

from slackipycore import invite, get_team_info
from slackipycore import (AlreadyInTeam, InvalidInviteeEmail,
                          InvalidAuthToken, AlreadyInvited, APIRequestError)
from flask import current_app


def invite_user(email):
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    try:
        if invite(team_id=team_id, api_token=api_token,
                  invitee_email=email):
            return {'status': 'success'}
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
            AlreadyInvited, APIRequestError) as e:
        return _response_message(message=str(e))


def get_team_name():
    api_token = current_app.config['SLACK_API_TOKEN']
    team_id = current_app.config['SLACK_TEAM_ID']
    team_info = get_team_info(team_id=team_id, api_token=api_token)
    return team_info['name']


def _response_message(message):
    return {'status': 'fail', 'error': message}

...
                  invitee_email=email):
            return {'status': 'success'}
    except (AlreadyInTeam, InvalidInviteeEmail, InvalidAuthToken,
...

4217f587606c4e326b4df97681ae4f5187b6e6d9
falmer/content/serializers.py
falmer/content/serializers.py
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers

from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage


def generate_image_url(image, filter_spec):
    from wagtail.wagtailimages.views.serve import generate_signature
    signature = generate_signature(image.id, filter_spec)
    url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))

    # Append image's original filename to the URL (optional)
    # url += image.file.name[len('original_images/'):]

    return settings.PUBLIC_HOST + url


class WagtailImageSerializer(serializers.ModelSerializer):
    wagtail_image = serializers.SerializerMethodField()
    resource = serializers.SerializerMethodField()

    class Meta:
        model = MatteImage
        fields = ('id', 'wagtail_image', 'resource')

    def get_wagtail_image(self, image):
        return generate_image_url(image, 'fill-400x400')

    def get_resource(self, image):
        return image.file.name


class SnippetSerializer(serializers.ModelSerializer):
    photo = WagtailImageSerializer()

    class Meta:
        model = StaffMemberSnippet
        fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')

from django.conf import settings
from django.urls import reverse
from rest_framework import serializers

from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage


def generate_image_url(image, filter_spec):
    from wagtail.wagtailimages.views.serve import generate_signature
    signature = generate_signature(image.id, filter_spec)
    url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))

    # Append image's original filename to the URL (optional)
    # url += image.file.name[len('original_images/'):]

    return settings.PUBLIC_HOST + url


class WagtailImageSerializer(serializers.ModelSerializer):
    resource = serializers.SerializerMethodField()

    class Meta:
        model = MatteImage
        fields = ('id', 'resource')

    def get_resource(self, image):
        return image.file.name


class SnippetSerializer(serializers.ModelSerializer):
    photo = WagtailImageSerializer()

    class Meta:
        model = StaffMemberSnippet
        fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')

Remove wagtail_image from image resources
Remove wagtail_image from image resources
Python
mit
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers

from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage


def generate_image_url(image, filter_spec):
    from wagtail.wagtailimages.views.serve import generate_signature
    signature = generate_signature(image.id, filter_spec)
    url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))

    # Append image's original filename to the URL (optional)
    # url += image.file.name[len('original_images/'):]

    return settings.PUBLIC_HOST + url


class WagtailImageSerializer(serializers.ModelSerializer):
-     wagtail_image = serializers.SerializerMethodField()
    resource = serializers.SerializerMethodField()

    class Meta:
        model = MatteImage
-         fields = ('id', 'wagtail_image', 'resource')
+         fields = ('id', 'resource')
-
-     def get_wagtail_image(self, image):
-         return generate_image_url(image, 'fill-400x400')
-
    def get_resource(self, image):
        return image.file.name


class SnippetSerializer(serializers.ModelSerializer):
    photo = WagtailImageSerializer()

    class Meta:
        model = StaffMemberSnippet
        fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')

Remove wagtail_image from image resources
## Code Before:
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers

from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage


def generate_image_url(image, filter_spec):
    from wagtail.wagtailimages.views.serve import generate_signature
    signature = generate_signature(image.id, filter_spec)
    url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))

    # Append image's original filename to the URL (optional)
    # url += image.file.name[len('original_images/'):]

    return settings.PUBLIC_HOST + url


class WagtailImageSerializer(serializers.ModelSerializer):
    wagtail_image = serializers.SerializerMethodField()
    resource = serializers.SerializerMethodField()

    class Meta:
        model = MatteImage
        fields = ('id', 'wagtail_image', 'resource')

    def get_wagtail_image(self, image):
        return generate_image_url(image, 'fill-400x400')

    def get_resource(self, image):
        return image.file.name


class SnippetSerializer(serializers.ModelSerializer):
    photo = WagtailImageSerializer()

    class Meta:
        model = StaffMemberSnippet
        fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')

## Instruction:
Remove wagtail_image from image resources

## Code After:
from django.conf import settings
from django.urls import reverse
from rest_framework import serializers

from falmer.content.models import StaffMemberSnippet
from falmer.matte.models import MatteImage


def generate_image_url(image, filter_spec):
    from wagtail.wagtailimages.views.serve import generate_signature
    signature = generate_signature(image.id, filter_spec)
    url = reverse('wagtailimages_serve', args=(signature, image.id, filter_spec))

    # Append image's original filename to the URL (optional)
    # url += image.file.name[len('original_images/'):]

    return settings.PUBLIC_HOST + url


class WagtailImageSerializer(serializers.ModelSerializer):
    resource = serializers.SerializerMethodField()

    class Meta:
        model = MatteImage
        fields = ('id', 'resource')

    def get_resource(self, image):
        return image.file.name


class SnippetSerializer(serializers.ModelSerializer):
    photo = WagtailImageSerializer()

    class Meta:
        model = StaffMemberSnippet
        fields = ('name', 'job_title', 'email', 'office_phone_number', 'mobile_phone_number', 'job_description', 'office_location', 'photo')

# ... existing code ...
class WagtailImageSerializer(serializers.ModelSerializer):
    resource = serializers.SerializerMethodField()
# ... modified code ...
        model = MatteImage
        fields = ('id', 'resource')
# ... rest of the code ...

def129e32bf731351253e210b53c44cf8c57c302
planetstack/openstack_observer/steps/sync_images.py
planetstack/openstack_observer/steps/sync_images.py
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image

class SyncImages(OpenStackSyncStep):
    provides=[Image]
    requested_interval=0
    observes=Image

    def fetch_pending(self, deleted):
        # Images come from the back end
        # You can't delete them
        if (deleted):
            return []

        # get list of images on disk
        images_path = Config().observer_images_directory
        available_images = {}
        for f in os.listdir(images_path):
            if os.path.isfile(os.path.join(images_path ,f)):
                available_images[f] = os.path.join(images_path ,f)

        images = Image.objects.all()
        image_names = [image.name for image in images]

        for image_name in available_images:
            #remove file extension
            clean_name = ".".join(image_name.split('.')[:-1])
            if clean_name not in image_names:
                image = Image(name=clean_name,
                              disk_format='raw',
                              container_format='bare',
                              path = available_images[image_name])
                image.save()

        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image):
        image.save()

import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image

class SyncImages(OpenStackSyncStep):
    provides=[Image]
    requested_interval=0
    observes=Image

    def fetch_pending(self, deleted):
        # Images come from the back end
        # You can't delete them
        if (deleted):
            return []

        # get list of images on disk
        images_path = Config().observer_images_directory

        available_images = {}
        if os.path.exists(images_path):
            for f in os.listdir(images_path):
                filename = os.path.join(images_path, f)
                if os.path.isfile(filename):
                    available_images[f] = filename

        images = Image.objects.all()
        image_names = [image.name for image in images]

        for image_name in available_images:
            #remove file extension
            clean_name = ".".join(image_name.split('.')[:-1])
            if clean_name not in image_names:
                image = Image(name=clean_name,
                              disk_format='raw',
                              container_format='bare',
                              path = available_images[image_name])
                image.save()

        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image):
        image.save()

Check the existence of the images_path
Check the existence of the images_path

ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
  File "/opt/xos/observer/event_loop.py", line 349, in sync
    failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
  File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
    return self.call(**args)
  File "/opt/xos/observer/syncstep.py", line 97, in call
    pending = self.fetch_pending(deletion)
  File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
    for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK

Signed-off-by: S.Çağlar Onur <[email protected]>

Python
apache-2.0
wathsalav/xos,wathsalav/xos,wathsalav/xos,wathsalav/xos
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image

class SyncImages(OpenStackSyncStep):
    provides=[Image]
    requested_interval=0
    observes=Image

    def fetch_pending(self, deleted):
        # Images come from the back end
        # You can't delete them
        if (deleted):
            return []

        # get list of images on disk
-         images_path = Config().observer_images_directory
+         images_path = Config().observer_images_directory
+
        available_images = {}
+         if os.path.exists(images_path):
-         for f in os.listdir(images_path):
+             for f in os.listdir(images_path):
-             if os.path.isfile(os.path.join(images_path ,f)):
-                 available_images[f] = os.path.join(images_path ,f)
+                 filename = os.path.join(images_path, f)
+                 if os.path.isfile(filename):
+                     available_images[f] = filename

        images = Image.objects.all()
        image_names = [image.name for image in images]

        for image_name in available_images:
            #remove file extension
            clean_name = ".".join(image_name.split('.')[:-1])
            if clean_name not in image_names:
                image = Image(name=clean_name,
                              disk_format='raw',
                              container_format='bare',
                              path = available_images[image_name])
                image.save()
+
-
-
        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image):
        image.save()

Check the existence of the images_path
## Code Before:
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image

class SyncImages(OpenStackSyncStep):
    provides=[Image]
    requested_interval=0
    observes=Image

    def fetch_pending(self, deleted):
        # Images come from the back end
        # You can't delete them
        if (deleted):
            return []

        # get list of images on disk
        images_path = Config().observer_images_directory
        available_images = {}
        for f in os.listdir(images_path):
            if os.path.isfile(os.path.join(images_path ,f)):
                available_images[f] = os.path.join(images_path ,f)

        images = Image.objects.all()
        image_names = [image.name for image in images]

        for image_name in available_images:
            #remove file extension
            clean_name = ".".join(image_name.split('.')[:-1])
            if clean_name not in image_names:
                image = Image(name=clean_name,
                              disk_format='raw',
                              container_format='bare',
                              path = available_images[image_name])
                image.save()

        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image):
        image.save()

## Instruction:
Check the existence of the images_path

## Code After:
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image

class SyncImages(OpenStackSyncStep):
    provides=[Image]
    requested_interval=0
    observes=Image

    def fetch_pending(self, deleted):
        # Images come from the back end
        # You can't delete them
        if (deleted):
            return []

        # get list of images on disk
        images_path = Config().observer_images_directory

        available_images = {}
        if os.path.exists(images_path):
            for f in os.listdir(images_path):
                filename = os.path.join(images_path, f)
                if os.path.isfile(filename):
                    available_images[f] = filename

        images = Image.objects.all()
        image_names = [image.name for image in images]

        for image_name in available_images:
            #remove file extension
            clean_name = ".".join(image_name.split('.')[:-1])
            if clean_name not in image_names:
                image = Image(name=clean_name,
                              disk_format='raw',
                              container_format='bare',
                              path = available_images[image_name])
                image.save()

        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image):
        image.save()

# ... existing code ...
        # get list of images on disk
        images_path = Config().observer_images_directory

        available_images = {}
        if os.path.exists(images_path):
            for f in os.listdir(images_path):
                filename = os.path.join(images_path, f)
                if os.path.isfile(filename):
                    available_images[f] = filename
# ... modified code ...
                image.save()

        return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
# ... rest of the code ...

313a81093527c88631713f6b4ad8c652554edb50
l10n_br_base/migrations/12.0.1.0.0/post-migration.py
l10n_br_base/migrations/12.0.1.0.0/post-migration.py
from openupgradelib import openupgrade


@openupgrade.migrate()
def migrate(env, version):
    cr = env.cr
    cr.execute(
        '''INSERT INTO res_city(id, name, country_id, state_id, ibge_code)
        SELECT nextval('res_city_id_seq'), name, (SELECT id FROM res_country
        WHERE code='BR'), state_id, ibge_code FROM l10n_br_base_city
        WHERE ibge_code NOT IN (SELECT ibge_code FROM res_city);
        ''')
    cr.execute(
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
        SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
        ''')
    cr.execute(
        '''UPDATE res_partner rp SET city_id=(
        SELECT id FROM res_city WHERE ibge_code=(
        SELECT ibge_code FROM l10n_br_base_city
        WHERE id=rp.l10n_br_city_id))
        ''')

from openupgradelib import openupgrade

_model_renames = [
    ('l10n_br_base.city', 'res.city'),
]

_table_renames = [
    ('l10n_br_base_city', 'res_city'),
]


@openupgrade.migrate()
def migrate(env, version):
    cr = env.cr
    openupgrade.rename_models(cr, _model_renames)
    openupgrade.rename_tables(cr, _table_renames)
    cr.execute(
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
        SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
        ''')
    cr.execute(
        '''UPDATE res_partner rp SET city_id=(
        SELECT id FROM res_city WHERE ibge_code=(
        SELECT ibge_code FROM l10n_br_base_city
        WHERE id=rp.l10n_br_city_id))
        ''')

Rename table _model_renames and _table_renames
[ADD] Rename table _model_renames and _table_renames

Signed-off-by: Luis Felipe Mileo <[email protected]>

Python
agpl-3.0
akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil
from openupgradelib import openupgrade
+
+ _model_renames = [
+     ('l10n_br_base.city', 'res.city'),
+ ]
+
+ _table_renames = [
+     ('l10n_br_base_city', 'res_city'),
+ ]
+

@openupgrade.migrate()
def migrate(env, version):
    cr = env.cr
+     openupgrade.rename_models(cr, _model_renames)
+     openupgrade.rename_tables(cr, _table_renames)
-     cr.execute(
-         '''INSERT INTO res_city(id, name, country_id, state_id, ibge_code)
-         SELECT nextval('res_city_id_seq'), name, (SELECT id FROM res_country
-         WHERE code='BR'), state_id, ibge_code FROM l10n_br_base_city
-         WHERE ibge_code NOT IN (SELECT ibge_code FROM res_city);
-         ''')
    cr.execute(
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
-         SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
+         SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
        ''')
    cr.execute(
        '''UPDATE res_partner rp SET city_id=(
        SELECT id FROM res_city WHERE ibge_code=(
        SELECT ibge_code FROM l10n_br_base_city
        WHERE id=rp.l10n_br_city_id))
        ''')

Rename table _model_renames and _table_renames
## Code Before:
from openupgradelib import openupgrade


@openupgrade.migrate()
def migrate(env, version):
    cr = env.cr
    cr.execute(
        '''INSERT INTO res_city(id, name, country_id, state_id, ibge_code)
        SELECT nextval('res_city_id_seq'), name, (SELECT id FROM res_country
        WHERE code='BR'), state_id, ibge_code FROM l10n_br_base_city
        WHERE ibge_code NOT IN (SELECT ibge_code FROM res_city);
        ''')
    cr.execute(
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
        SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
        ''')
    cr.execute(
        '''UPDATE res_partner rp SET city_id=(
        SELECT id FROM res_city WHERE ibge_code=(
        SELECT ibge_code FROM l10n_br_base_city
        WHERE id=rp.l10n_br_city_id))
        ''')

## Instruction:
Rename table _model_renames and _table_renames

## Code After:
from openupgradelib import openupgrade

_model_renames = [
    ('l10n_br_base.city', 'res.city'),
]

_table_renames = [
    ('l10n_br_base_city', 'res_city'),
]


@openupgrade.migrate()
def migrate(env, version):
    cr = env.cr
    openupgrade.rename_models(cr, _model_renames)
    openupgrade.rename_tables(cr, _table_renames)
    cr.execute(
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
        SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
        ''')
    cr.execute(
        '''UPDATE res_partner rp SET city_id=(
        SELECT id FROM res_city WHERE ibge_code=(
        SELECT ibge_code FROM l10n_br_base_city
        WHERE id=rp.l10n_br_city_id))
        ''')

// ... existing code ...
from openupgradelib import openupgrade

_model_renames = [
    ('l10n_br_base.city', 'res.city'),
]

_table_renames = [
    ('l10n_br_base_city', 'res_city'),
]
// ... modified code ...
    cr = env.cr
    openupgrade.rename_models(cr, _model_renames)
    openupgrade.rename_tables(cr, _table_renames)
    cr.execute(
...
        '''INSERT INTO state_tax_numbers(id, inscr_est, partner_id, state_id)
        SELECT nextval('state_tax_numbers_id_seq'), inscr_est, partner_id,
        state_id FROM other_inscricoes_estaduais;
// ... rest of the code ...

bec268ef554e6f30c2cecd52ecddcafc34c5b0db
tutorials/cmake_python_wrapper/v1/python/foo/__init__.py
tutorials/cmake_python_wrapper/v1/python/foo/__init__.py
import ctypes
import numpy as np
import os

__all__ = ['square']

lib = ctypes.cdll.LoadLibrary("libfoo.so")
lib.square.restype = ctypes.c_int
lib.square.argtypes = [ctypes.c_int]


def square(value):
    """
    Parameters
    ----------
    value: int

    Returns
    --------
    value square
    """
    return lib.square(value)

import ctypes
import numpy as np
import os
import sys

__all__ = ['square']

_path = os.path.dirname(__file__)

libname = None
if sys.platform.startswith('linux'):
    libname = 'libfoo.so'
elif sys.platform == 'darwin':
    libname = 'libfoo.dylib'
elif sys.platform.startswith('win'):
    libname = 'foo.dll'
if libname ==None:
    print("Unknow platform", sys.platform)

else:
    lib = ctypes.CDLL(libname)

    lib.square.restype = ctypes.c_int
    lib.square.argtypes = [ctypes.c_int]

    def square(value):
        """
        Parameters
        ----------
        value: int

        Returns
        --------
        value square
        """
        return lib.square(value)

Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX
Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX
Python
bsd-3-clause
gammapy/PyGamma15,gammapy/2015-MPIK-Workshop,gammapy/2015-MPIK-Workshop,gammapy/PyGamma15,gammapy/PyGamma15,gammapy/2015-MPIK-Workshop
import ctypes
import numpy as np
import os
+ import sys

__all__ = ['square']

- lib = ctypes.cdll.LoadLibrary("libfoo.so")
+ _path = os.path.dirname(__file__)
+
+ libname = None
+ if sys.platform.startswith('linux'):
+     libname = 'libfoo.so'
+ elif sys.platform == 'darwin':
+     libname = 'libfoo.dylib'
+ elif sys.platform.startswith('win'):
+     libname = 'foo.dll'
+ if libname ==None:
+     print("Unknow platform", sys.platform)
+
+ else:
+     lib = ctypes.CDLL(libname)
+
- lib.square.restype = ctypes.c_int
+     lib.square.restype = ctypes.c_int
- lib.square.argtypes = [ctypes.c_int]
+     lib.square.argtypes = [ctypes.c_int]

- def square(value):
+     def square(value):
-     """
+         """
-     Parameters
+         Parameters
-     ----------
+         ----------
-     value: int
+         value: int

-     Returns
-     --------
+         Returns
+         --------
-     value square
+         value square
-     """
+         """
-     return lib.square(value)
+         return lib.square(value)
-

Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX
## Code Before:
import ctypes
import numpy as np
import os

__all__ = ['square']

lib = ctypes.cdll.LoadLibrary("libfoo.so")
lib.square.restype = ctypes.c_int
lib.square.argtypes = [ctypes.c_int]


def square(value):
    """
    Parameters
    ----------
    value: int

    Returns
    --------
    value square
    """
    return lib.square(value)

## Instruction:
Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX

## Code After:
import ctypes
import numpy as np
import os
import sys

__all__ = ['square']

_path = os.path.dirname(__file__)

libname = None
if sys.platform.startswith('linux'):
    libname = 'libfoo.so'
elif sys.platform == 'darwin':
    libname = 'libfoo.dylib'
elif sys.platform.startswith('win'):
    libname = 'foo.dll'
if libname ==None:
    print("Unknow platform", sys.platform)

else:
    lib = ctypes.CDLL(libname)

    lib.square.restype = ctypes.c_int
    lib.square.argtypes = [ctypes.c_int]

    def square(value):
        """
        Parameters
        ----------
        value: int

        Returns
        --------
        value square
        """
        return lib.square(value)

...
import os
import sys
...
_path = os.path.dirname(__file__)

libname = None
if sys.platform.startswith('linux'):
    libname = 'libfoo.so'
elif sys.platform == 'darwin':
    libname = 'libfoo.dylib'
elif sys.platform.startswith('win'):
    libname = 'foo.dll'
if libname ==None:
    print("Unknow platform", sys.platform)

else:
    lib = ctypes.CDLL(libname)

    lib.square.restype = ctypes.c_int
    lib.square.argtypes = [ctypes.c_int]
...
    def square(value):
        """
        Parameters
        ----------
        value: int

        Returns
        --------
        value square
        """
        return lib.square(value)
...

b6d08abf7bc4aafaeec59944bdcdf8ae4a9352d5
recipe_scrapers/consts.py
recipe_scrapers/consts.py
import re

TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
)

HTML_SYMBOLS = '\xa0'  # &nbsp;

import re

TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)

HTML_SYMBOLS = '\xa0'  # &nbsp;

Update time_regex captcher so to work with more sites
Update time_regex captcher so to work with more sites
Python
mit
hhursev/recipe-scraper
import re

TIME_REGEX = re.compile(
-     r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
+     r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)

HTML_SYMBOLS = '\xa0'  # &nbsp;

Update time_regex captcher so to work with more sites
## Code Before:
import re

TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s{1}(hours|hrs|hr|h))?((?P<minutes>\s*\d+)\s{1}(minutes|mins|min|m))?\Z'
)

HTML_SYMBOLS = '\xa0'  # &nbsp;

## Instruction:
Update time_regex captcher so to work with more sites

## Code After:
import re

TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)

HTML_SYMBOLS = '\xa0'  # &nbsp;

# ... existing code ...
TIME_REGEX = re.compile(
    r'\A(\s*(?P<hours>\d+)\s*(hours|hrs|hr|h))?(\s*(?P<minutes>\d+)\s*(minutes|mins|min|m))?\Z'
)
# ... rest of the code ...

46e9db6167a9c4f7f778381da888537c00d35bfd
emailsupport/admin.py
emailsupport/admin.py
from __future__ import unicode_literals

from django.contrib import admin

from models import Email, Resolution


class ResolutionInline(admin.StackedInline):
    model = Resolution
    max_num = 1


class EmailAdmin(admin.ModelAdmin):
    list_display = ('subject', 'submitter', 'get_state_display')
    inlines = [ResolutionInline]
    ordering = ('-state', '-created')
    change_form_template = 'admin/email_change_form.html'
    readonly_fields = ('submitter', 'subject', 'body', 'body_html')
    fieldsets = (
        ('Question', {
            'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
        }),
    )

    class Media:
        css = {
            "all": ("admin/css/admin.css",)
        }

    def render_change_form(self, *args, **kwargs):
        response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
        email = response.context_data['original']
        response.context_data['previous_email'] = self.get_previous_email(email)
        response.context_data['next_email'] = self.get_next_email(email)
        return response

    def get_previous_email(self, email):
        return Email.objects.get_previous_email(email)

    def get_next_email(self, email):
        return Email.objects.get_next_email(email)


admin.site.register(Email, EmailAdmin)

from __future__ import unicode_literals

from django.contrib import admin

from models import Email, Resolution


class ResolutionInline(admin.StackedInline):
    model = Resolution
    max_num = 1


class EmailAdmin(admin.ModelAdmin):
    list_display = ('subject', 'submitter', 'get_state_display')
    inlines = [ResolutionInline]
    ordering = ('-state', '-created')
    change_form_template = 'admin/email_change_form.html'
    readonly_fields = ('submitter', 'subject', 'body', 'body_html')
    fieldsets = (
        ('Question', {
            'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
        }),
    )

    class Media:
        css = {
            "all": ("admin/css/admin.css",)
        }

    def render_change_form(self, *args, **kwargs):
        response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
        email = response.context_data['original']
        if email:
            response.context_data['previous_email'] = self.get_previous_email(email)
            response.context_data['next_email'] = self.get_next_email(email)
        return response

    def get_previous_email(self, email):
        return Email.objects.get_previous_email(email)

    def get_next_email(self, email):
        return Email.objects.get_next_email(email)


admin.site.register(Email, EmailAdmin)

Add prev. and next email to context only if exist original (current)
Add prev. and next email to context only if exist original (current)
Python
mit
rosti-cz/django-emailsupport
from __future__ import unicode_literals

from django.contrib import admin

from models import Email, Resolution


class ResolutionInline(admin.StackedInline):
    model = Resolution
    max_num = 1


class EmailAdmin(admin.ModelAdmin):
    list_display = ('subject', 'submitter', 'get_state_display')
    inlines = [ResolutionInline]
    ordering = ('-state', '-created')
    change_form_template = 'admin/email_change_form.html'
    readonly_fields = ('submitter', 'subject', 'body', 'body_html')
    fieldsets = (
        ('Question', {
            'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
        }),
    )

    class Media:
        css = {
            "all": ("admin/css/admin.css",)
        }

    def render_change_form(self, *args, **kwargs):
        response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
        email = response.context_data['original']
+         if email:
-         response.context_data['previous_email'] = self.get_previous_email(email)
+             response.context_data['previous_email'] = self.get_previous_email(email)
-         response.context_data['next_email'] = self.get_next_email(email)
+             response.context_data['next_email'] = self.get_next_email(email)
        return response

    def get_previous_email(self, email):
        return Email.objects.get_previous_email(email)

    def get_next_email(self, email):
        return Email.objects.get_next_email(email)


admin.site.register(Email, EmailAdmin)

Add prev. and next email to context only if exist original (current)
## Code Before:
from __future__ import unicode_literals

from django.contrib import admin

from models import Email, Resolution


class ResolutionInline(admin.StackedInline):
    model = Resolution
    max_num = 1


class EmailAdmin(admin.ModelAdmin):
    list_display = ('subject', 'submitter', 'get_state_display')
    inlines = [ResolutionInline]
    ordering = ('-state', '-created')
    change_form_template = 'admin/email_change_form.html'
    readonly_fields = ('submitter', 'subject', 'body', 'body_html')
    fieldsets = (
        ('Question', {
            'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
        }),
    )

    class Media:
        css = {
            "all": ("admin/css/admin.css",)
        }

    def render_change_form(self, *args, **kwargs):
        response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
        email = response.context_data['original']
        response.context_data['previous_email'] = self.get_previous_email(email)
        response.context_data['next_email'] = self.get_next_email(email)
        return response

    def get_previous_email(self, email):
        return Email.objects.get_previous_email(email)

    def get_next_email(self, email):
        return Email.objects.get_next_email(email)


admin.site.register(Email, EmailAdmin)

## Instruction:
Add prev. and next email to context only if exist original (current)

## Code After:
from __future__ import unicode_literals

from django.contrib import admin

from models import Email, Resolution


class ResolutionInline(admin.StackedInline):
    model = Resolution
    max_num = 1


class EmailAdmin(admin.ModelAdmin):
    list_display = ('subject', 'submitter', 'get_state_display')
    inlines = [ResolutionInline]
    ordering = ('-state', '-created')
    change_form_template = 'admin/email_change_form.html'
    readonly_fields = ('submitter', 'subject', 'body', 'body_html')
    fieldsets = (
        ('Question', {
            'fields': ('submitter', 'subject', 'body', 'body_html', 'state')
        }),
    )

    class Media:
        css = {
            "all": ("admin/css/admin.css",)
        }

    def render_change_form(self, *args, **kwargs):
        response = super(EmailAdmin, self).render_change_form(*args, **kwargs)
        email = response.context_data['original']
        if email:
            response.context_data['previous_email'] = self.get_previous_email(email)
            response.context_data['next_email'] = self.get_next_email(email)
        return response

    def get_previous_email(self, email):
        return Email.objects.get_previous_email(email)

    def get_next_email(self, email):
        return Email.objects.get_next_email(email)


admin.site.register(Email, EmailAdmin)

// ... existing code ...
        email = response.context_data['original']
        if email:
            response.context_data['previous_email'] = self.get_previous_email(email)
            response.context_data['next_email'] = self.get_next_email(email)
        return response
// ... rest of the code ...

c897942c8b1c3d9283ea6453bcc6616ca3d5108e
builds/python3.6_ci/src/lint_turtle_files.py
builds/python3.6_ci/src/lint_turtle_files.py
print("Hello, I am turtle linter")
import logging
import os

import daiquiri
import rdflib

daiquiri.setup(level=logging.INFO)

logger = daiquiri.getLogger(__name__)

# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []


def parse_turtle(path):
    """
    Try to parse the Turtle at a given path. Raises a ValueError if it fails!
    """
    logger.info("Parsing Turtle at path %s", path)
    graph = rdflib.Graph()
    try:
        graph.parse(path, format='ttl')
    except Exception as exc:
        # Get the name of the exception class
        # e.g. rdflib.plugins.parsers.notation3.BadSyntax
        exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'

        # Then try to log something useful
        logger.error("Error parsing Turtle (%s)", exc_name)
        logger.error(exc)

        failures.append(path)
    else:
        logger.info("Successfully parsed Turtle!")


if __name__ == '__main__':
    for root, _, filenames in os.walk('.'):
        for f in filenames:
            if not f.endswith('.ttl'):
                continue
            path = os.path.join(root, f)

            if 'WIP' in path:
                logger.info("Skipping path %s as WIP", path)
                continue

            parse_turtle(path)

Write a proper Turtle linter
Write a proper Turtle linter
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
- print("Hello, I am turtle linter")
+ import logging
+ import os
+
+ import daiquiri
+ import rdflib
+
+ daiquiri.setup(level=logging.INFO)
+
+ logger = daiquiri.getLogger(__name__)
+
+ # This is a slightly cheaty way of tracking which paths (if any) failed --
+ # we append to this global list, and inspect it at the end of the script!
+ failures = []
+
+
+ def parse_turtle(path):
+     """
+     Try to parse the Turtle at a given path. Raises a ValueError if it fails!
+     """
+     logger.info("Parsing Turtle at path %s", path)
+     graph = rdflib.Graph()
+     try:
+         graph.parse(path, format='ttl')
+     except Exception as exc:
+         # Get the name of the exception class
+         # e.g. rdflib.plugins.parsers.notation3.BadSyntax
+         exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'
+
+         # Then try to log something useful
+         logger.error("Error parsing Turtle (%s)", exc_name)
+         logger.error(exc)
+
+         failures.append(path)
+     else:
+         logger.info("Successfully parsed Turtle!")
+
+
+ if __name__ == '__main__':
+     for root, _, filenames in os.walk('.'):
+         for f in filenames:
+             if not f.endswith('.ttl'):
+                 continue
+             path = os.path.join(root, f)
+
+             if 'WIP' in path:
+                 logger.info("Skipping path %s as WIP", path)
+                 continue
+
+             parse_turtle(path)

Write a proper Turtle linter
## Code Before:
print("Hello, I am turtle linter")

## Instruction:
Write a proper Turtle linter

## Code After:
import logging
import os

import daiquiri
import rdflib

daiquiri.setup(level=logging.INFO)

logger = daiquiri.getLogger(__name__)

# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []


def parse_turtle(path):
    """
    Try to parse the Turtle at a given path. Raises a ValueError if it fails!
    """
    logger.info("Parsing Turtle at path %s", path)
    graph = rdflib.Graph()
    try:
        graph.parse(path, format='ttl')
    except Exception as exc:
        # Get the name of the exception class
        # e.g. rdflib.plugins.parsers.notation3.BadSyntax
        exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'

        # Then try to log something useful
        logger.error("Error parsing Turtle (%s)", exc_name)
        logger.error(exc)

        failures.append(path)
    else:
        logger.info("Successfully parsed Turtle!")


if __name__ == '__main__':
    for root, _, filenames in os.walk('.'):
        for f in filenames:
            if not f.endswith('.ttl'):
                continue
            path = os.path.join(root, f)

            if 'WIP' in path:
                logger.info("Skipping path %s as WIP", path)
                continue

            parse_turtle(path)

// ... existing code ...
import logging
import os

import daiquiri
import rdflib

daiquiri.setup(level=logging.INFO)

logger = daiquiri.getLogger(__name__)

# This is a slightly cheaty way of tracking which paths (if any) failed --
# we append to this global list, and inspect it at the end of the script!
failures = []


def parse_turtle(path):
    """
    Try to parse the Turtle at a given path. Raises a ValueError if it fails!
    """
    logger.info("Parsing Turtle at path %s", path)
    graph = rdflib.Graph()
    try:
        graph.parse(path, format='ttl')
    except Exception as exc:
        # Get the name of the exception class
        # e.g. rdflib.plugins.parsers.notation3.BadSyntax
        exc_name = f'{exc.__class__.__module__}.{exc.__class__.__name__}'

        # Then try to log something useful
        logger.error("Error parsing Turtle (%s)", exc_name)
        logger.error(exc)

        failures.append(path)
    else:
        logger.info("Successfully parsed Turtle!")


if __name__ == '__main__':
    for root, _, filenames in os.walk('.'):
        for f in filenames:
            if not f.endswith('.ttl'):
                continue
            path = os.path.join(root, f)

            if 'WIP' in path:
                logger.info("Skipping path %s as WIP", path)
                continue

            parse_turtle(path)
// ... rest of the code ...

446923b12942f351f2f40d035f0c1e6f9dcb8813
__init__.py
__init__.py
import os
import sys

# Add the third_party/ dir to our search path so that we can find the
# modules in there automatically. This isn't normal, so don't replicate
# this pattern elsewhere.
_third_party = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(
    __file__)), 'third_party'))
sys.path.insert(0, _third_party)

# List of third_party packages that might need subpaths added to search.
_paths = [
    'pyelftools',
]

for _path in _paths:
  sys.path.insert(1, os.path.join(_third_party, _path))

import os
import sys

# Add the third_party/ dir to our search path so that we can find the
# modules in there automatically. This isn't normal, so don't replicate
# this pattern elsewhere.
_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
_containing_dir = os.path.dirname(_chromite_dir)
_third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
# If chromite is living inside the Chrome checkout under
# <chrome_root>/src/third_party/chromite, its dependencies will be checked out
# to <chrome_root>/src/third_party instead of the normal chromite/third_party
# location due to git-submodule limitations (a submodule cannot be contained
# inside another submodule's workspace), so we want to add that to the
# search path.
if os.path.basename(_containing_dir) == 'third_party':
  _third_party_dirs.append(_containing_dir)

# List of third_party packages that might need subpaths added to search.
_paths = [
    'pyelftools',
]

for _path in _paths:
  for _third_party in _third_party_dirs[:]:
    _component = os.path.join(_third_party, _path)
    if os.path.isdir(_component):
      _third_party_dirs.append(_component)

sys.path = _third_party_dirs + sys.path

Add <chrome>/src/third_party dir to PYTHONPATH for Chrome checkouts.
Add <chrome>/src/third_party dir to PYTHONPATH for Chrome checkouts.

If chromite is living inside the Chrome checkout under
<chrome_root>/src/third_party/chromite, its dependencies will be checked out
to <chrome_root>/src/third_party instead of the normal chromite/third_party
location due to git-submodule limitations (a submodule cannot be contained
inside another submodule's workspace), so we want to add that to the
search path.

BUG=None
TEST=Local

Change-Id: I10a12bcddc88e509c1c7015a95c54d578fb8b122
Reviewed-on: https://gerrit.chromium.org/gerrit/43066
Reviewed-by: Mike Frysinger <[email protected]>
Commit-Queue: Ryan Cui <[email protected]>
Tested-by: Ryan Cui <[email protected]>

Python
bsd-3-clause
coreos/chromite,bpsinc-native/src_third_party_chromite,bpsinc-native/src_third_party_chromite,bpsinc-native/src_third_party_chromite,zhang0137/chromite,chadversary/chromiumos.chromite,coreos/chromite,zhang0137/chromite,coreos/chromite,zhang0137/chromite,chadversary/chromiumos.chromite
import os
import sys

# Add the third_party/ dir to our search path so that we can find the
# modules in there automatically. This isn't normal, so don't replicate
# this pattern elsewhere.
- _third_party = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(
-     __file__)), 'third_party'))
- sys.path.insert(0, _third_party)
+ _chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
+ _containing_dir = os.path.dirname(_chromite_dir)
+ _third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
+ # If chromite is living inside the Chrome checkout under
+ # <chrome_root>/src/third_party/chromite, its dependencies will be checked out
+ # to <chrome_root>/src/third_party instead of the normal chromite/third_party
+ # location due to git-submodule limitations (a submodule cannot be contained
+ # inside another submodule's workspace), so we want to add that to the
+ # search path.
+ if os.path.basename(_containing_dir) == 'third_party':
+   _third_party_dirs.append(_containing_dir)

# List of third_party packages that might need subpaths added to search.
_paths = [
-     'pyelftools',
+     'pyelftools',
]
+
for _path in _paths:
-   sys.path.insert(1, os.path.join(_third_party, _path))
+   for _third_party in _third_party_dirs[:]:
+     _component = os.path.join(_third_party, _path)
+     if os.path.isdir(_component):
+       _third_party_dirs.append(_component)
+
+ sys.path = _third_party_dirs + sys.path
-

Add <chrome>/src/third_party dir to PYTHONPATH for Chrome checkouts.
## Code Before:
import os
import sys

# Add the third_party/ dir to our search path so that we can find the
# modules in there automatically. This isn't normal, so don't replicate
# this pattern elsewhere.
_third_party = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(
    __file__)), 'third_party'))
sys.path.insert(0, _third_party)

# List of third_party packages that might need subpaths added to search.
_paths = [
    'pyelftools',
]

for _path in _paths:
  sys.path.insert(1, os.path.join(_third_party, _path))

## Instruction:
Add <chrome>/src/third_party dir to PYTHONPATH for Chrome checkouts.

## Code After:
import os
import sys

# Add the third_party/ dir to our search path so that we can find the
# modules in there automatically. This isn't normal, so don't replicate
# this pattern elsewhere.
_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
_containing_dir = os.path.dirname(_chromite_dir)
_third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
# If chromite is living inside the Chrome checkout under
# <chrome_root>/src/third_party/chromite, its dependencies will be checked out
# to <chrome_root>/src/third_party instead of the normal chromite/third_party
# location due to git-submodule limitations (a submodule cannot be contained
# inside another submodule's workspace), so we want to add that to the
# search path.
if os.path.basename(_containing_dir) == 'third_party':
  _third_party_dirs.append(_containing_dir)

# List of third_party packages that might need subpaths added to search.
_paths = [
    'pyelftools',
]

for _path in _paths:
  for _third_party in _third_party_dirs[:]:
    _component = os.path.join(_third_party, _path)
    if os.path.isdir(_component):
      _third_party_dirs.append(_component)

sys.path = _third_party_dirs + sys.path

// ... existing code ...
# this pattern elsewhere.
_chromite_dir = os.path.normpath(os.path.dirname(os.path.realpath(__file__)))
_containing_dir = os.path.dirname(_chromite_dir)
_third_party_dirs = [os.path.join(_chromite_dir, 'third_party')]
# If chromite is living inside the Chrome checkout under
# <chrome_root>/src/third_party/chromite, its dependencies will be checked out
# to <chrome_root>/src/third_party instead of the normal chromite/third_party
# location due to git-submodule limitations (a submodule cannot be contained
# inside another submodule's workspace), so we want to add that to the
# search path.
if os.path.basename(_containing_dir) == 'third_party':
  _third_party_dirs.append(_containing_dir)
// ... modified code ...
_paths = [
    'pyelftools',
]

for _path in _paths:
  for _third_party in _third_party_dirs[:]:
    _component = os.path.join(_third_party, _path)
    if os.path.isdir(_component):
      _third_party_dirs.append(_component)

sys.path = _third_party_dirs + sys.path
// ... rest of the code ...

b6b9c6f3f8faaade428d044f93acd25edade075d
tools/pdtools/pdtools/__main__.py
tools/pdtools/pdtools/__main__.py
import os

import click

from . import chute
from . import device
from . import routers
from . import store

PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")


@click.group()
@click.pass_context
def root(ctx):
    """
    Paradrop command line utility.

    Environment Variables
        PDSERVER_URL    ParaDrop controller URL [default: https://paradrop.org]
    """
    # Options can be parsed from PDTOOLS_* environment variables.
    ctx.auto_envvar_prefix = 'PDTOOLS'

    # Respond to both -h and --help for all commands.
    ctx.help_option_names = ['-h', '--help']

    ctx.obj = {
        'pdserver_url': PDSERVER_URL
    }


root.add_command(chute.chute)
root.add_command(device.device)
root.add_command(routers.routers)
root.add_command(store.store)


def main():
    """
    Entry point for the pdtools Python package.
    """
    root()


if __name__ == "__main__":
    main()

import os

import click

from . import chute
from . import device
from . import routers
from . import store

PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")

CONTEXT_SETTINGS = dict(
    # Options can be parsed from PDTOOLS_* environment variables.
    auto_envvar_prefix = 'PDTOOLS',

    # Respond to both -h and --help for all commands.
    help_option_names = ['-h', '--help'],

    obj = {
        'pdserver_url': PDSERVER_URL
    }
)


@click.group(context_settings=CONTEXT_SETTINGS)
def root(ctx):
    """
    Paradrop command line utility.

    Environment Variables
        PDSERVER_URL    ParaDrop controller URL [default: https://paradrop.org]
    """
    pass


root.add_command(chute.chute)
root.add_command(device.device)
root.add_command(routers.routers)
root.add_command(store.store)


def main():
    """
    Entry point for the pdtools Python package.
    """
    root()


if __name__ == "__main__":
    main()

Enable '-h' help option from the pdtools root level.
Enable '-h' help option from the pdtools root level.
Python
apache-2.0
ParadropLabs/Paradrop,ParadropLabs/Paradrop,ParadropLabs/Paradrop
import os

import click

from . import chute
from . import device
from . import routers
from . import store

PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")

+ CONTEXT_SETTINGS = dict(
+     # Options can be parsed from PDTOOLS_* environment variables.
+     auto_envvar_prefix = 'PDTOOLS',

- @click.group()
- @click.pass_context
+     # Respond to both -h and --help for all commands.
+     help_option_names = ['-h', '--help'],
+
+     obj = {
+         'pdserver_url': PDSERVER_URL
+     }
+ )
+
+
+ @click.group(context_settings=CONTEXT_SETTINGS)
def root(ctx):
    """
    Paradrop command line utility.

    Environment Variables
        PDSERVER_URL    ParaDrop controller URL [default: https://paradrop.org]
    """
+     pass
-     # Options can be parsed from PDTOOLS_* environment variables.
-     ctx.auto_envvar_prefix = 'PDTOOLS'
-
-     # Respond to both -h and --help for all commands.
-     ctx.help_option_names = ['-h', '--help']
-
-     ctx.obj = {
-         'pdserver_url': PDSERVER_URL
-     }


root.add_command(chute.chute)
root.add_command(device.device)
root.add_command(routers.routers)
root.add_command(store.store)


def main():
    """
    Entry point for the pdtools Python package.
    """
    root()


if __name__ == "__main__":
    main()

Enable '-h' help option from the pdtools root level.
## Code Before:
import os

import click

from . import chute
from . import device
from . import routers
from . import store

PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")


@click.group()
@click.pass_context
def root(ctx):
    """
    Paradrop command line utility.

    Environment Variables
        PDSERVER_URL    ParaDrop controller URL [default: https://paradrop.org]
    """
    # Options can be parsed from PDTOOLS_* environment variables.
    ctx.auto_envvar_prefix = 'PDTOOLS'

    # Respond to both -h and --help for all commands.
    ctx.help_option_names = ['-h', '--help']

    ctx.obj = {
        'pdserver_url': PDSERVER_URL
    }


root.add_command(chute.chute)
root.add_command(device.device)
root.add_command(routers.routers)
root.add_command(store.store)


def main():
    """
    Entry point for the pdtools Python package.
    """
    root()


if __name__ == "__main__":
    main()

## Instruction:
Enable '-h' help option from the pdtools root level.

## Code After:
import os

import click

from . import chute
from . import device
from . import routers
from . import store

PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org")

CONTEXT_SETTINGS = dict(
    # Options can be parsed from PDTOOLS_* environment variables.
    auto_envvar_prefix = 'PDTOOLS',

    # Respond to both -h and --help for all commands.
    help_option_names = ['-h', '--help'],

    obj = {
        'pdserver_url': PDSERVER_URL
    }
)


@click.group(context_settings=CONTEXT_SETTINGS)
def root(ctx):
    """
    Paradrop command line utility.

    Environment Variables
        PDSERVER_URL    ParaDrop controller URL [default: https://paradrop.org]
    """
    pass


root.add_command(chute.chute)
root.add_command(device.device)
root.add_command(routers.routers)
root.add_command(store.store)


def main():
    """
    Entry point for the pdtools Python package.
    """
    root()


if __name__ == "__main__":
    main()

// ... existing code ...
CONTEXT_SETTINGS = dict(
    # Options can be parsed from PDTOOLS_* environment variables.
    auto_envvar_prefix = 'PDTOOLS',

    # Respond to both -h and --help for all commands.
    help_option_names = ['-h', '--help'],

    obj = {
        'pdserver_url': PDSERVER_URL
    }
)


@click.group(context_settings=CONTEXT_SETTINGS)
def root(ctx):
// ... modified code ...
    """
    pass
// ... rest of the code ...

7f3d76bdec3731ae50b9487556b1b2750cd3108e
setup.py
setup.py
from distutils.core import setup

import versioneer

setup(
    name='iterm2-tools',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='''iTerm2 tools.''',
    author='Aaron Meurer',
    author_email='[email protected]',
    url='https://github.com/asmeurer/iterm2-tools',
    packages=['iterm2_tools'],
    package_data={'iterm2_tools.tests': ['aloha_cat.png']},
    long_description="""
iterm2-tools

Some tools for working with iTerm2's proprietary escape codes. For now,
only includes iterm2_tools.images, which has functions for displaying
images inline.

License: MIT
""",
    license="MIT",
    classifiers=[
        'Environment :: MacOS X',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 2',
    ],
)

from distutils.core import setup

import versioneer

setup(
    name='iterm2-tools',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='''iTerm2 tools.''',
    author='Aaron Meurer',
    author_email='[email protected]',
    url='https://github.com/asmeurer/iterm2-tools',
    packages=[
        'iterm2_tools',
        'iterm2_tools.tests'
    ],
    package_data={'iterm2_tools.tests': ['aloha_cat.png']},
    long_description="""
iterm2-tools

Some tools for working with iTerm2's proprietary escape codes. For now,
only includes iterm2_tools.images, which has functions for displaying
images inline.

License: MIT
""",
    license="MIT",
    classifiers=[
        'Environment :: MacOS X',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 2',
    ],
)

Include the tests in the dist
Include the tests in the dist
Python
mit
asmeurer/iterm2-tools
from distutils.core import setup

import versioneer

setup(
    name='iterm2-tools',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='''iTerm2 tools.''',
    author='Aaron Meurer',
    author_email='[email protected]',
    url='https://github.com/asmeurer/iterm2-tools',
-     packages=['iterm2_tools'],
+     packages=[
+         'iterm2_tools',
+         'iterm2_tools.tests'
+     ],
    package_data={'iterm2_tools.tests': ['aloha_cat.png']},
    long_description="""
iterm2-tools

Some tools for working with iTerm2's proprietary escape codes. For now,
only includes iterm2_tools.images, which has functions for displaying
images inline.

License: MIT
""",
    license="MIT",
    classifiers=[
        'Environment :: MacOS X',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 2',
    ],
)

Include the tests in the dist
## Code Before:
from distutils.core import setup

import versioneer

setup(
    name='iterm2-tools',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='''iTerm2 tools.''',
    author='Aaron Meurer',
    author_email='[email protected]',
    url='https://github.com/asmeurer/iterm2-tools',
    packages=['iterm2_tools'],
    package_data={'iterm2_tools.tests': ['aloha_cat.png']},
    long_description="""
iterm2-tools

Some tools for working with iTerm2's proprietary escape codes. For now,
only includes iterm2_tools.images, which has functions for displaying
images inline.

License: MIT
""",
    license="MIT",
    classifiers=[
        'Environment :: MacOS X',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 2',
    ],
)

## Instruction:
Include the tests in the dist

## Code After:
from distutils.core import setup

import versioneer

setup(
    name='iterm2-tools',
    version=versioneer.get_version(),
    cmdclass=versioneer.get_cmdclass(),
    description='''iTerm2 tools.''',
    author='Aaron Meurer',
    author_email='[email protected]',
    url='https://github.com/asmeurer/iterm2-tools',
    packages=[
        'iterm2_tools',
        'iterm2_tools.tests'
    ],
    package_data={'iterm2_tools.tests': ['aloha_cat.png']},
    long_description="""
iterm2-tools

Some tools for working with iTerm2's proprietary escape codes. For now,
only includes iterm2_tools.images, which has functions for displaying
images inline.

License: MIT
""",
    license="MIT",
    classifiers=[
        'Environment :: MacOS X',
        'Operating System :: MacOS :: MacOS X',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 2',
    ],
)

# ... existing code ... url='https://github.com/asmeurer/iterm2-tools', packages=[ 'iterm2_tools', 'iterm2_tools.tests' ], package_data={'iterm2_tools.tests': ['aloha_cat.png']}, # ... rest of the code ...
201d8d532b907d97823c2dbf61fdd6e75b8eb615
form_designer/contrib/cms_plugins/form_designer_form/cms_plugins.py
form_designer/contrib/cms_plugins/form_designer_form/cms_plugins.py
from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition from form_designer.views import process_form from form_designer import settings from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from django.utils.translation import ugettext as _ class FormDesignerPlugin(CMSPluginBase): model = CMSFormDefinition module = _('Form Designer') name = _('Form') admin_preview = False render_template = False def render(self, context, instance, placeholder): if instance.form_definition.form_template_name: self.render_template = instance.form_definition.form_template_name else: self.render_template = settings.DEFAULT_FORM_TEMPLATE # Redirection does not work with CMS plugin, hence disable: return process_form(context['request'], instance.form_definition, context, disable_redirection=True, push_messages=False) plugin_pool.register_plugin(FormDesignerPlugin)
from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition from form_designer.views import process_form from form_designer import settings from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from django.utils.translation import ugettext as _ class FormDesignerPlugin(CMSPluginBase): model = CMSFormDefinition module = _('Form Designer') name = _('Form') admin_preview = False render_template = False cache = False # New in version 3.0. see http://django-cms.readthedocs.org/en/latest/advanced/caching.html def render(self, context, instance, placeholder): if instance.form_definition.form_template_name: self.render_template = instance.form_definition.form_template_name else: self.render_template = settings.DEFAULT_FORM_TEMPLATE # Redirection does not work with CMS plugin, hence disable: return process_form(context['request'], instance.form_definition, context, disable_redirection=True, push_messages=False) plugin_pool.register_plugin(FormDesignerPlugin)
Disable caching for CMS plugin.
Disable caching for CMS plugin. CSRF tokens may get cached otherwise. This is for compatibility with Django CMS 3.0+.
Python
bsd-3-clause
andersinno/django-form-designer-ai,andersinno/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer-ai
from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition from form_designer.views import process_form from form_designer import settings from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from django.utils.translation import ugettext as _ class FormDesignerPlugin(CMSPluginBase): model = CMSFormDefinition module = _('Form Designer') name = _('Form') admin_preview = False render_template = False + cache = False # New in version 3.0. see http://django-cms.readthedocs.org/en/latest/advanced/caching.html def render(self, context, instance, placeholder): if instance.form_definition.form_template_name: self.render_template = instance.form_definition.form_template_name else: self.render_template = settings.DEFAULT_FORM_TEMPLATE # Redirection does not work with CMS plugin, hence disable: return process_form(context['request'], instance.form_definition, context, disable_redirection=True, push_messages=False) plugin_pool.register_plugin(FormDesignerPlugin)
Disable caching for CMS plugin.
## Code Before: from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition from form_designer.views import process_form from form_designer import settings from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from django.utils.translation import ugettext as _ class FormDesignerPlugin(CMSPluginBase): model = CMSFormDefinition module = _('Form Designer') name = _('Form') admin_preview = False render_template = False def render(self, context, instance, placeholder): if instance.form_definition.form_template_name: self.render_template = instance.form_definition.form_template_name else: self.render_template = settings.DEFAULT_FORM_TEMPLATE # Redirection does not work with CMS plugin, hence disable: return process_form(context['request'], instance.form_definition, context, disable_redirection=True, push_messages=False) plugin_pool.register_plugin(FormDesignerPlugin) ## Instruction: Disable caching for CMS plugin. ## Code After: from form_designer.contrib.cms_plugins.form_designer_form.models import CMSFormDefinition from form_designer.views import process_form from form_designer import settings from cms.plugin_base import CMSPluginBase from cms.plugin_pool import plugin_pool from django.utils.translation import ugettext as _ class FormDesignerPlugin(CMSPluginBase): model = CMSFormDefinition module = _('Form Designer') name = _('Form') admin_preview = False render_template = False cache = False # New in version 3.0. see http://django-cms.readthedocs.org/en/latest/advanced/caching.html def render(self, context, instance, placeholder): if instance.form_definition.form_template_name: self.render_template = instance.form_definition.form_template_name else: self.render_template = settings.DEFAULT_FORM_TEMPLATE # Redirection does not work with CMS plugin, hence disable: return process_form(context['request'], instance.form_definition, context, disable_redirection=True, push_messages=False) plugin_pool.register_plugin(FormDesignerPlugin)
// ... existing code ... render_template = False cache = False # New in version 3.0. see http://django-cms.readthedocs.org/en/latest/advanced/caching.html // ... rest of the code ...
b730fc84e07994d0a12357b70a1e912d0a032832
Lib/vanilla/test/testTools.py
Lib/vanilla/test/testTools.py
from AppKit import * from PyObjCTools import AppHelper class _VanillaMiniAppDelegate(NSObject): def applicationShouldTerminateAfterLastWindowClosed_(self, notification): return True def executeVanillaTest(cls, **kwargs): """ Execute a Vanilla UI class in a mini application. """ app = NSApplication.sharedApplication() delegate = _VanillaMiniAppDelegate.alloc().init() app.setDelegate_(delegate) cls(**kwargs) app.activateIgnoringOtherApps_(True) AppHelper.runEventLoop()
from AppKit import * from PyObjCTools import AppHelper class _VanillaMiniAppDelegate(NSObject): def applicationShouldTerminateAfterLastWindowClosed_(self, notification): return True def executeVanillaTest(cls, **kwargs): """ Execute a Vanilla UI class in a mini application. """ app = NSApplication.sharedApplication() delegate = _VanillaMiniAppDelegate.alloc().init() app.setDelegate_(delegate) mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test") fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "") fileMenu = NSMenu.alloc().initWithTitle_("File") fileMenuItem.setSubmenu_(fileMenu) mainMenu.addItem_(fileMenuItem) editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "") editMenu = NSMenu.alloc().initWithTitle_("Edit") editMenuItem.setSubmenu_(editMenu) mainMenu.addItem_(editMenuItem) helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "") helpMenu = NSMenu.alloc().initWithTitle_("Help") helpMenuItem.setSubmenu_(helpMenu) mainMenu.addItem_(helpMenuItem) app.setMainMenu_(mainMenu) cls(**kwargs) app.activateIgnoringOtherApps_(True) AppHelper.runEventLoop()
Add a menu to the test runner.
Add a menu to the test runner.
Python
mit
typemytype/vanilla,typesupply/vanilla,moyogo/vanilla
from AppKit import * from PyObjCTools import AppHelper class _VanillaMiniAppDelegate(NSObject): def applicationShouldTerminateAfterLastWindowClosed_(self, notification): return True def executeVanillaTest(cls, **kwargs): """ Execute a Vanilla UI class in a mini application. """ app = NSApplication.sharedApplication() delegate = _VanillaMiniAppDelegate.alloc().init() app.setDelegate_(delegate) + + mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test") + + fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "") + fileMenu = NSMenu.alloc().initWithTitle_("File") + fileMenuItem.setSubmenu_(fileMenu) + mainMenu.addItem_(fileMenuItem) + + editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "") + editMenu = NSMenu.alloc().initWithTitle_("Edit") + editMenuItem.setSubmenu_(editMenu) + mainMenu.addItem_(editMenuItem) + + helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "") + helpMenu = NSMenu.alloc().initWithTitle_("Help") + helpMenuItem.setSubmenu_(helpMenu) + mainMenu.addItem_(helpMenuItem) + + app.setMainMenu_(mainMenu) + cls(**kwargs) app.activateIgnoringOtherApps_(True) AppHelper.runEventLoop() -
Add a menu to the test runner.
## Code Before: from AppKit import * from PyObjCTools import AppHelper class _VanillaMiniAppDelegate(NSObject): def applicationShouldTerminateAfterLastWindowClosed_(self, notification): return True def executeVanillaTest(cls, **kwargs): """ Execute a Vanilla UI class in a mini application. """ app = NSApplication.sharedApplication() delegate = _VanillaMiniAppDelegate.alloc().init() app.setDelegate_(delegate) cls(**kwargs) app.activateIgnoringOtherApps_(True) AppHelper.runEventLoop() ## Instruction: Add a menu to the test runner. ## Code After: from AppKit import * from PyObjCTools import AppHelper class _VanillaMiniAppDelegate(NSObject): def applicationShouldTerminateAfterLastWindowClosed_(self, notification): return True def executeVanillaTest(cls, **kwargs): """ Execute a Vanilla UI class in a mini application. """ app = NSApplication.sharedApplication() delegate = _VanillaMiniAppDelegate.alloc().init() app.setDelegate_(delegate) mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test") fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "") fileMenu = NSMenu.alloc().initWithTitle_("File") fileMenuItem.setSubmenu_(fileMenu) mainMenu.addItem_(fileMenuItem) editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "") editMenu = NSMenu.alloc().initWithTitle_("Edit") editMenuItem.setSubmenu_(editMenu) mainMenu.addItem_(editMenuItem) helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "") helpMenu = NSMenu.alloc().initWithTitle_("Help") helpMenuItem.setSubmenu_(helpMenu) mainMenu.addItem_(helpMenuItem) app.setMainMenu_(mainMenu) cls(**kwargs) app.activateIgnoringOtherApps_(True) AppHelper.runEventLoop()
// ... existing code ... app.setDelegate_(delegate) mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test") fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "") fileMenu = NSMenu.alloc().initWithTitle_("File") fileMenuItem.setSubmenu_(fileMenu) mainMenu.addItem_(fileMenuItem) editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "") editMenu = NSMenu.alloc().initWithTitle_("Edit") editMenuItem.setSubmenu_(editMenu) mainMenu.addItem_(editMenuItem) helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "") helpMenu = NSMenu.alloc().initWithTitle_("Help") helpMenuItem.setSubmenu_(helpMenu) mainMenu.addItem_(helpMenuItem) app.setMainMenu_(mainMenu) cls(**kwargs) // ... modified code ... AppHelper.runEventLoop() // ... rest of the code ...
c9abba3a9ca6ccf1a9bed5fad5cda12557b3266c
tests/chainer_tests/training_tests/extensions_tests/test_plot_report.py
tests/chainer_tests/training_tests/extensions_tests/test_plot_report.py
import unittest import warnings from chainer import testing from chainer.training import extensions class TestPlotReport(unittest.TestCase): def test_available(self): try: import matplotlib # NOQA available = True except ImportError: available = False with warnings.catch_warnings(record=True) as w: self.assertEqual(extensions.PlotReport.available(), available) # It shows warning only when matplotlib.pyplot is not available if available: self.assertEqual(len(w), 0) else: self.assertEqual(len(w), 1) @unittest.skipUnless( extensions.PlotReport.available(), 'matplotlib is not installed') def test_lazy_import(self): # To support python2, we do not use self.assertWarns() with warnings.catch_warnings(record=True) as w: import matplotlib matplotlib.use('Agg') self.assertEqual(len(w), 0) testing.run_module(__name__, __file__)
import unittest import warnings from chainer import testing from chainer.training import extensions class TestPlotReport(unittest.TestCase): def test_available(self): try: import matplotlib # NOQA available = True except ImportError: available = False with warnings.catch_warnings(record=True) as w: self.assertEqual(extensions.PlotReport.available(), available) # It shows warning only when matplotlib is not available if available: self.assertEqual(len(w), 0) else: self.assertEqual(len(w), 1) # In Python 2 the above test does not raise UserWarning, # so we use plot_report._available instead of PlotReport.available() @unittest.skipUnless( extensions.plot_report._available, 'matplotlib is not installed') def test_lazy_import(self): # To support python2, we do not use self.assertWarns() with warnings.catch_warnings(record=True) as w: import matplotlib matplotlib.use('Agg') self.assertEqual(len(w), 0) testing.run_module(__name__, __file__)
Use plot_report._available instead of available()
Use plot_report._available instead of available()
Python
mit
hvy/chainer,jnishi/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,chainer/chainer,jnishi/chainer,niboshi/chainer,okuta/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,okuta/chainer,okuta/chainer,chainer/chainer,hvy/chainer,ktnyt/chainer,ktnyt/chainer,wkentaro/chainer,jnishi/chainer,keisuke-umezawa/chainer,tkerola/chainer,wkentaro/chainer,ktnyt/chainer,ronekko/chainer,keisuke-umezawa/chainer,ktnyt/chainer,aonotas/chainer,rezoo/chainer,kashif/chainer,anaruse/chainer,jnishi/chainer,chainer/chainer,okuta/chainer,niboshi/chainer,niboshi/chainer,pfnet/chainer,chainer/chainer
import unittest import warnings from chainer import testing from chainer.training import extensions class TestPlotReport(unittest.TestCase): def test_available(self): try: import matplotlib # NOQA available = True except ImportError: available = False with warnings.catch_warnings(record=True) as w: self.assertEqual(extensions.PlotReport.available(), available) - # It shows warning only when matplotlib.pyplot is not available + # It shows warning only when matplotlib is not available if available: self.assertEqual(len(w), 0) else: self.assertEqual(len(w), 1) + # In Python 2 the above test does not raise UserWarning, + # so we use plot_report._available instead of PlotReport.available() @unittest.skipUnless( - extensions.PlotReport.available(), 'matplotlib is not installed') + extensions.plot_report._available, 'matplotlib is not installed') def test_lazy_import(self): # To support python2, we do not use self.assertWarns() with warnings.catch_warnings(record=True) as w: import matplotlib matplotlib.use('Agg') self.assertEqual(len(w), 0) testing.run_module(__name__, __file__)
Use plot_report._available instead of available()
## Code Before: import unittest import warnings from chainer import testing from chainer.training import extensions class TestPlotReport(unittest.TestCase): def test_available(self): try: import matplotlib # NOQA available = True except ImportError: available = False with warnings.catch_warnings(record=True) as w: self.assertEqual(extensions.PlotReport.available(), available) # It shows warning only when matplotlib.pyplot is not available if available: self.assertEqual(len(w), 0) else: self.assertEqual(len(w), 1) @unittest.skipUnless( extensions.PlotReport.available(), 'matplotlib is not installed') def test_lazy_import(self): # To support python2, we do not use self.assertWarns() with warnings.catch_warnings(record=True) as w: import matplotlib matplotlib.use('Agg') self.assertEqual(len(w), 0) testing.run_module(__name__, __file__) ## Instruction: Use plot_report._available instead of available() ## Code After: import unittest import warnings from chainer import testing from chainer.training import extensions class TestPlotReport(unittest.TestCase): def test_available(self): try: import matplotlib # NOQA available = True except ImportError: available = False with warnings.catch_warnings(record=True) as w: self.assertEqual(extensions.PlotReport.available(), available) # It shows warning only when matplotlib is not available if available: self.assertEqual(len(w), 0) else: self.assertEqual(len(w), 1) # In Python 2 the above test does not raise UserWarning, # so we use plot_report._available instead of PlotReport.available() @unittest.skipUnless( extensions.plot_report._available, 'matplotlib is not installed') def test_lazy_import(self): # To support python2, we do not use self.assertWarns() with warnings.catch_warnings(record=True) as w: import matplotlib matplotlib.use('Agg') self.assertEqual(len(w), 0) testing.run_module(__name__, __file__)
... # It shows warning only when matplotlib is not available if available: ... # In Python 2 the above test does not raise UserWarning, # so we use plot_report._available instead of PlotReport.available() @unittest.skipUnless( extensions.plot_report._available, 'matplotlib is not installed') def test_lazy_import(self): ...
44650a0b3d395b4201a039bd2f3eb916987dce8d
_grains/osqueryinfo.py
_grains/osqueryinfo.py
import salt.utils import salt.modules.cmdmod __salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet} def osquerygrain(): ''' Return osquery version in grain ''' # Provides: # osqueryversion # osquerybinpath grains = {} option = '--version' # Prefer our /opt/osquery/osqueryi if present osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi') for path in osqueryipaths: if salt.utils.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): if item[:1].isdigit(): grains['osqueryversion'] = item grains['osquerybinpath'] = salt.utils.which(path) break break return grains
import salt.utils import salt.modules.cmdmod __salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet} def osquerygrain(): ''' Return osquery version in grain ''' # Provides: # osqueryversion # osquerybinpath grains = {} option = '--version' # Prefer our /opt/osquery/osqueryi if present osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi') for path in osqueryipaths: if salt.utils.path.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): if item[:1].isdigit(): grains['osqueryversion'] = item grains['osquerybinpath'] = salt.utils.path.which(path) break break return grains
Use of salt.utils.which detected. This function has been moved to salt.utils.path.which as of Salt 2018.3.0
DeprecationWarning: Use of salt.utils.which detected. This function has been moved to salt.utils.path.which as of Salt 2018.3.0
Python
apache-2.0
hubblestack/hubble-salt
import salt.utils import salt.modules.cmdmod __salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet} def osquerygrain(): ''' Return osquery version in grain ''' # Provides: # osqueryversion # osquerybinpath grains = {} option = '--version' # Prefer our /opt/osquery/osqueryi if present osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi') for path in osqueryipaths: - if salt.utils.which(path): + if salt.utils.path.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): if item[:1].isdigit(): grains['osqueryversion'] = item - grains['osquerybinpath'] = salt.utils.which(path) + grains['osquerybinpath'] = salt.utils.path.which(path) break break return grains
Use of salt.utils.which detected. This function has been moved to salt.utils.path.which as of Salt 2018.3.0
## Code Before: import salt.utils import salt.modules.cmdmod __salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet} def osquerygrain(): ''' Return osquery version in grain ''' # Provides: # osqueryversion # osquerybinpath grains = {} option = '--version' # Prefer our /opt/osquery/osqueryi if present osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi') for path in osqueryipaths: if salt.utils.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): if item[:1].isdigit(): grains['osqueryversion'] = item grains['osquerybinpath'] = salt.utils.which(path) break break return grains ## Instruction: Use of salt.utils.which detected. This function has been moved to salt.utils.path.which as of Salt 2018.3.0 ## Code After: import salt.utils import salt.modules.cmdmod __salt__ = {'cmd.run': salt.modules.cmdmod._run_quiet} def osquerygrain(): ''' Return osquery version in grain ''' # Provides: # osqueryversion # osquerybinpath grains = {} option = '--version' # Prefer our /opt/osquery/osqueryi if present osqueryipaths = ('/opt/osquery/osqueryi', 'osqueryi', '/usr/bin/osqueryi') for path in osqueryipaths: if salt.utils.path.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): if item[:1].isdigit(): grains['osqueryversion'] = item grains['osquerybinpath'] = salt.utils.path.which(path) break break return grains
// ... existing code ... for path in osqueryipaths: if salt.utils.path.which(path): for item in __salt__['cmd.run']('{0} {1}'.format(path, option)).split(): // ... modified code ... grains['osqueryversion'] = item grains['osquerybinpath'] = salt.utils.path.which(path) break // ... rest of the code ...
a0a0d120552eeb304ac4b49648a43be5cf83cdcb
piper/core.py
piper/core.py
class Piper(object): """ The main runner. This class loads the configurations, sets up all other components, and finally executes them in whatever order they are supposed to happen in. """ def __init__(self): pass
import logbook class Piper(object): """ The main pipeline runner. This class loads the configurations, sets up all other components, executes them in whatever order they are supposed to happen in, collects data about the state of the pipeline and persists it, and finally tears down the components that needs tearing down. The functions are almost executed in the order found in this file. Woo! """ def __init__(self): self.log = logbook.Logger(self.__class__.__name__) def setup(self): """ Performs all setup steps This is basically an umbrella function that runs setup for all the things that the class needs to run a fully configured execute(). """ pass def load_config(self): """ Parses the configuration file and dies in flames if there are errors. """ pass def setup_environment(self): """ Load the environment and it's configuration """ pass def setup_steps(self): """ Loads the steps and their configuration. Also determines which collection of steps is to be ran. """ pass def execute(self): """ Runs the steps and determines whether to continue or not. Of all the things to happen in this application, this is probably the most important part! """ pass def save_state(self): """ Collects all data about the pipeline being built and persists it. """ pass def teardown_environment(self): """ Execute teardown step of the environment """ pass
Add more skeletonisms and documentation for Piper()
Add more skeletonisms and documentation for Piper()
Python
mit
thiderman/piper
+ import logbook + + class Piper(object): """ - The main runner. + The main pipeline runner. - This class loads the configurations, sets up all other components, and + This class loads the configurations, sets up all other components, - finally executes them in whatever order they are supposed to happen in. + executes them in whatever order they are supposed to happen in, collects + data about the state of the pipeline and persists it, and finally tears + down the components that needs tearing down. + + The functions are almost executed in the order found in this file. Woo! """ def __init__(self): + self.log = logbook.Logger(self.__class__.__name__) + + def setup(self): + """ + Performs all setup steps + + This is basically an umbrella function that runs setup for all the + things that the class needs to run a fully configured execute(). + + """ + pass + def load_config(self): + """ + Parses the configuration file and dies in flames if there are errors. + + """ + + pass + + def setup_environment(self): + """ + Load the environment and it's configuration + + """ + + pass + + def setup_steps(self): + """ + Loads the steps and their configuration. + + Also determines which collection of steps is to be ran. + + """ + + pass + + def execute(self): + """ + Runs the steps and determines whether to continue or not. + + Of all the things to happen in this application, this is probably + the most important part! + + """ + + pass + + def save_state(self): + """ + Collects all data about the pipeline being built and persists it. + + """ + + pass + + def teardown_environment(self): + """ + Execute teardown step of the environment + + """ + + pass +
Add more skeletonisms and documentation for Piper()
## Code Before: class Piper(object): """ The main runner. This class loads the configurations, sets up all other components, and finally executes them in whatever order they are supposed to happen in. """ def __init__(self): pass ## Instruction: Add more skeletonisms and documentation for Piper() ## Code After: import logbook class Piper(object): """ The main pipeline runner. This class loads the configurations, sets up all other components, executes them in whatever order they are supposed to happen in, collects data about the state of the pipeline and persists it, and finally tears down the components that needs tearing down. The functions are almost executed in the order found in this file. Woo! """ def __init__(self): self.log = logbook.Logger(self.__class__.__name__) def setup(self): """ Performs all setup steps This is basically an umbrella function that runs setup for all the things that the class needs to run a fully configured execute(). """ pass def load_config(self): """ Parses the configuration file and dies in flames if there are errors. """ pass def setup_environment(self): """ Load the environment and it's configuration """ pass def setup_steps(self): """ Loads the steps and their configuration. Also determines which collection of steps is to be ran. """ pass def execute(self): """ Runs the steps and determines whether to continue or not. Of all the things to happen in this application, this is probably the most important part! """ pass def save_state(self): """ Collects all data about the pipeline being built and persists it. """ pass def teardown_environment(self): """ Execute teardown step of the environment """ pass
// ... existing code ... import logbook class Piper(object): // ... modified code ... """ The main pipeline runner. This class loads the configurations, sets up all other components, executes them in whatever order they are supposed to happen in, collects data about the state of the pipeline and persists it, and finally tears down the components that needs tearing down. The functions are almost executed in the order found in this file. Woo! ... def __init__(self): self.log = logbook.Logger(self.__class__.__name__) def setup(self): """ Performs all setup steps This is basically an umbrella function that runs setup for all the things that the class needs to run a fully configured execute(). """ pass def load_config(self): """ Parses the configuration file and dies in flames if there are errors. """ pass def setup_environment(self): """ Load the environment and it's configuration """ pass def setup_steps(self): """ Loads the steps and their configuration. Also determines which collection of steps is to be ran. """ pass def execute(self): """ Runs the steps and determines whether to continue or not. Of all the things to happen in this application, this is probably the most important part! """ pass def save_state(self): """ Collects all data about the pipeline being built and persists it. """ pass def teardown_environment(self): """ Execute teardown step of the environment """ pass // ... rest of the code ...
8fafef4c2151d17133c5787847d68ab4b58f40c3
stagecraft/libs/views/utils.py
stagecraft/libs/views/utils.py
import json from django.utils.cache import patch_response_headers from functools import wraps def long_cache(a_view): @wraps(a_view) def _wrapped_view(request, *args, **kwargs): response = a_view(request, *args, **kwargs) patch_response_headers(response, 86400 * 365) return response return _wrapped_view def to_json(what): return json.dumps(what, indent=1)
import json from django.utils.cache import patch_response_headers from functools import wraps from uuid import UUID class JsonEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, UUID): return '{}'.format(obj) if hasattr(obj, 'serialize'): return obj.serialize() return json.JSONEncoder.default(self, obj) def long_cache(a_view): @wraps(a_view) def _wrapped_view(request, *args, **kwargs): response = a_view(request, *args, **kwargs) patch_response_headers(response, 86400 * 365) return response return _wrapped_view def to_json(what): return json.dumps(what, indent=1, cls=JsonEncoder)
Extend JSON serialiser to use serialize() method
Extend JSON serialiser to use serialize() method If an object is a UUID, return a string representation of it. If the object still can't be serialised, call its serialize() method. This is useful when nesting Link models inside dashboards, for example.
Python
mit
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
import json from django.utils.cache import patch_response_headers from functools import wraps + from uuid import UUID + + + class JsonEncoder(json.JSONEncoder): + def default(self, obj): + if isinstance(obj, UUID): + return '{}'.format(obj) + + if hasattr(obj, 'serialize'): + return obj.serialize() + + return json.JSONEncoder.default(self, obj) def long_cache(a_view): @wraps(a_view) def _wrapped_view(request, *args, **kwargs): response = a_view(request, *args, **kwargs) patch_response_headers(response, 86400 * 365) return response return _wrapped_view def to_json(what): - return json.dumps(what, indent=1) + return json.dumps(what, indent=1, cls=JsonEncoder)
Extend JSON serialiser to use serialize() method
## Code Before: import json from django.utils.cache import patch_response_headers from functools import wraps def long_cache(a_view): @wraps(a_view) def _wrapped_view(request, *args, **kwargs): response = a_view(request, *args, **kwargs) patch_response_headers(response, 86400 * 365) return response return _wrapped_view def to_json(what): return json.dumps(what, indent=1) ## Instruction: Extend JSON serialiser to use serialize() method ## Code After: import json from django.utils.cache import patch_response_headers from functools import wraps from uuid import UUID class JsonEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, UUID): return '{}'.format(obj) if hasattr(obj, 'serialize'): return obj.serialize() return json.JSONEncoder.default(self, obj) def long_cache(a_view): @wraps(a_view) def _wrapped_view(request, *args, **kwargs): response = a_view(request, *args, **kwargs) patch_response_headers(response, 86400 * 365) return response return _wrapped_view def to_json(what): return json.dumps(what, indent=1, cls=JsonEncoder)
// ... existing code ... from functools import wraps from uuid import UUID class JsonEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, UUID): return '{}'.format(obj) if hasattr(obj, 'serialize'): return obj.serialize() return json.JSONEncoder.default(self, obj) // ... modified code ... def to_json(what): return json.dumps(what, indent=1, cls=JsonEncoder) // ... rest of the code ...
b6dea08a0a9908d2303693cf4534c7b0beec4154
analyticpi/db.py
analyticpi/db.py
import os import peewee APP_DIR = os.path.dirname(__file__) try: import urlparse import psycopg2 urlparse.uses_netloc.append('postgres') url = urlparse.urlparse(os.environ["DATABASE_URL"]) database = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port) except KeyError: database = peewee.MySQLDatabase(os.environ["MYSQL_DATABASE"], os.environ["MYSQL_HOST"], user=os.environ["MYSQL_USER"], passwd=os.environ["MYSQL_PASSWD"])
import os import peewee APP_DIR = os.path.dirname(__file__) try: import urlparse import psycopg2 urlparse.uses_netloc.append('postgres') url = urlparse.urlparse(os.environ["DATABASE_URL"]) database = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port) except KeyError: database = peewee.SqliteDatabase('my_app.db')
Change from MySQL to SQLite3
Change from MySQL to SQLite3
Python
mit
analyticpi/analyticpi,analyticpi/analyticpi,analyticpi/analyticpi
import os import peewee APP_DIR = os.path.dirname(__file__) try: import urlparse import psycopg2 urlparse.uses_netloc.append('postgres') url = urlparse.urlparse(os.environ["DATABASE_URL"]) database = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port) except KeyError: + database = peewee.SqliteDatabase('my_app.db') - database = peewee.MySQLDatabase(os.environ["MYSQL_DATABASE"], - os.environ["MYSQL_HOST"], - user=os.environ["MYSQL_USER"], - passwd=os.environ["MYSQL_PASSWD"])
Change from MySQL to SQLite3
## Code Before: import os import peewee APP_DIR = os.path.dirname(__file__) try: import urlparse import psycopg2 urlparse.uses_netloc.append('postgres') url = urlparse.urlparse(os.environ["DATABASE_URL"]) database = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port) except KeyError: database = peewee.MySQLDatabase(os.environ["MYSQL_DATABASE"], os.environ["MYSQL_HOST"], user=os.environ["MYSQL_USER"], passwd=os.environ["MYSQL_PASSWD"]) ## Instruction: Change from MySQL to SQLite3 ## Code After: import os import peewee APP_DIR = os.path.dirname(__file__) try: import urlparse import psycopg2 urlparse.uses_netloc.append('postgres') url = urlparse.urlparse(os.environ["DATABASE_URL"]) database = peewee.PostgresqlDatabase(database=url.path[1:], user=url.username, password=url.password, host=url.hostname, port=url.port) except KeyError: database = peewee.SqliteDatabase('my_app.db')
# ... existing code ... except KeyError: database = peewee.SqliteDatabase('my_app.db') # ... rest of the code ...
fc85f8846c188992438c935b9ba1ff0394bbc866
deployment/cfn/utils/constants.py
deployment/cfn/utils/constants.py
EC2_INSTANCE_TYPES = [ 't2.micro', 't2.small', 't2.medium' ] RDS_INSTANCE_TYPES = [ 'db.t2.micro' ] ELASTICACHE_INSTANCE_TYPES = [ 'cache.m1.small' ] ALLOW_ALL_CIDR = '0.0.0.0/0' VPC_CIDR = '10.0.0.0/16' GRAPHITE = 2003 GRAPHITE_WEB = 8080 HTTP = 80 HTTPS = 443 KIBANA = 5601 POSTGRESQL = 5432 REDIS = 6379 RELP = 20514 SSH = 22 STATSITE = 8125
EC2_INSTANCE_TYPES = [ 't2.micro', 't2.small', 't2.medium', 't2.large' ] RDS_INSTANCE_TYPES = [ 'db.t2.micro', 'db.t2.small', 'db.t2.medium', 'db.t2.large' ] ELASTICACHE_INSTANCE_TYPES = [ 'cache.m1.small' ] ALLOW_ALL_CIDR = '0.0.0.0/0' VPC_CIDR = '10.0.0.0/16' GRAPHITE = 2003 GRAPHITE_WEB = 8080 HTTP = 80 HTTPS = 443 KIBANA = 5601 POSTGRESQL = 5432 REDIS = 6379 RELP = 20514 SSH = 22 STATSITE = 8125
Increase options for EC2 and RDS instance types
Increase options for EC2 and RDS instance types Adding small through large for both.
Python
apache-2.0
project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app,lliss/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed
EC2_INSTANCE_TYPES = [ 't2.micro', 't2.small', - 't2.medium' + 't2.medium', + 't2.large' ] RDS_INSTANCE_TYPES = [ - 'db.t2.micro' + 'db.t2.micro', + 'db.t2.small', + 'db.t2.medium', + 'db.t2.large' ] ELASTICACHE_INSTANCE_TYPES = [ 'cache.m1.small' ] ALLOW_ALL_CIDR = '0.0.0.0/0' VPC_CIDR = '10.0.0.0/16' GRAPHITE = 2003 GRAPHITE_WEB = 8080 HTTP = 80 HTTPS = 443 KIBANA = 5601 POSTGRESQL = 5432 REDIS = 6379 RELP = 20514 SSH = 22 STATSITE = 8125
Increase options for EC2 and RDS instance types
## Code Before: EC2_INSTANCE_TYPES = [ 't2.micro', 't2.small', 't2.medium' ] RDS_INSTANCE_TYPES = [ 'db.t2.micro' ] ELASTICACHE_INSTANCE_TYPES = [ 'cache.m1.small' ] ALLOW_ALL_CIDR = '0.0.0.0/0' VPC_CIDR = '10.0.0.0/16' GRAPHITE = 2003 GRAPHITE_WEB = 8080 HTTP = 80 HTTPS = 443 KIBANA = 5601 POSTGRESQL = 5432 REDIS = 6379 RELP = 20514 SSH = 22 STATSITE = 8125 ## Instruction: Increase options for EC2 and RDS instance types ## Code After: EC2_INSTANCE_TYPES = [ 't2.micro', 't2.small', 't2.medium', 't2.large' ] RDS_INSTANCE_TYPES = [ 'db.t2.micro', 'db.t2.small', 'db.t2.medium', 'db.t2.large' ] ELASTICACHE_INSTANCE_TYPES = [ 'cache.m1.small' ] ALLOW_ALL_CIDR = '0.0.0.0/0' VPC_CIDR = '10.0.0.0/16' GRAPHITE = 2003 GRAPHITE_WEB = 8080 HTTP = 80 HTTPS = 443 KIBANA = 5601 POSTGRESQL = 5432 REDIS = 6379 RELP = 20514 SSH = 22 STATSITE = 8125
# ... existing code ... 't2.small', 't2.medium', 't2.large' ] # ... modified code ... RDS_INSTANCE_TYPES = [ 'db.t2.micro', 'db.t2.small', 'db.t2.medium', 'db.t2.large' ] # ... rest of the code ...
dc755e07516e1cbbcd01f01e8be59abf8f1a6329
humfrey/update/management/commands/update_dataset.py
humfrey/update/management/commands/update_dataset.py
import base64 import datetime import os import pickle from lxml import etree import redis from django.core.management.base import BaseCommand from django.conf import settings from humfrey.update.longliving.updater import Updater class Command(BaseCommand): def handle(self, *args, **options): config_filename = os.path.abspath(args[0]) with open(config_filename, 'r') as f: config_file = etree.parse(f) dataset_name = config_file.xpath('meta/name')[0].text client = redis.client.Redis(**settings.REDIS_PARAMS) client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({ 'config_filename': config_filename, 'name': dataset_name, 'trigger': 'manual', 'queued_at': datetime.datetime.now(), })))
import base64 import datetime import os import pickle from lxml import etree import redis from django.core.management.base import BaseCommand from django.conf import settings from humfrey.update.longliving.updater import Updater class Command(BaseCommand): def handle(self, *args, **options): config_filename = os.path.abspath(args[0]) trigger = args[1] if len(args) > 1 else 'manual' with open(config_filename, 'r') as f: config_file = etree.parse(f) dataset_name = config_file.xpath('meta/name')[0].text client = redis.client.Redis(**settings.REDIS_PARAMS) client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({ 'config_filename': config_filename, 'name': dataset_name, 'trigger': trigger, 'queued_at': datetime.datetime.now(), }))) if __name__ == '__main__': import sys Command().handle(*sys.argv[1:])
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
Python
bsd-3-clause
ox-it/humfrey,ox-it/humfrey,ox-it/humfrey
import base64 import datetime import os import pickle from lxml import etree import redis from django.core.management.base import BaseCommand from django.conf import settings from humfrey.update.longliving.updater import Updater class Command(BaseCommand): def handle(self, *args, **options): config_filename = os.path.abspath(args[0]) + trigger = args[1] if len(args) > 1 else 'manual' with open(config_filename, 'r') as f: config_file = etree.parse(f) dataset_name = config_file.xpath('meta/name')[0].text client = redis.client.Redis(**settings.REDIS_PARAMS) client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({ 'config_filename': config_filename, 'name': dataset_name, - 'trigger': 'manual', + 'trigger': trigger, 'queued_at': datetime.datetime.now(), }))) - + if __name__ == '__main__': + import sys + Command().handle(*sys.argv[1:])
Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script.
## Code Before: import base64 import datetime import os import pickle from lxml import etree import redis from django.core.management.base import BaseCommand from django.conf import settings from humfrey.update.longliving.updater import Updater class Command(BaseCommand): def handle(self, *args, **options): config_filename = os.path.abspath(args[0]) with open(config_filename, 'r') as f: config_file = etree.parse(f) dataset_name = config_file.xpath('meta/name')[0].text client = redis.client.Redis(**settings.REDIS_PARAMS) client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({ 'config_filename': config_filename, 'name': dataset_name, 'trigger': 'manual', 'queued_at': datetime.datetime.now(), }))) ## Instruction: Update trigger can now be specified on the command line as the second argument, and the module can now be run as a script. ## Code After: import base64 import datetime import os import pickle from lxml import etree import redis from django.core.management.base import BaseCommand from django.conf import settings from humfrey.update.longliving.updater import Updater class Command(BaseCommand): def handle(self, *args, **options): config_filename = os.path.abspath(args[0]) trigger = args[1] if len(args) > 1 else 'manual' with open(config_filename, 'r') as f: config_file = etree.parse(f) dataset_name = config_file.xpath('meta/name')[0].text client = redis.client.Redis(**settings.REDIS_PARAMS) client.rpush(Updater.QUEUE_NAME, base64.b64encode(pickle.dumps({ 'config_filename': config_filename, 'name': dataset_name, 'trigger': trigger, 'queued_at': datetime.datetime.now(), }))) if __name__ == '__main__': import sys Command().handle(*sys.argv[1:])
# ... existing code ... config_filename = os.path.abspath(args[0]) trigger = args[1] if len(args) > 1 else 'manual' # ... modified code ... 'name': dataset_name, 'trigger': trigger, 'queued_at': datetime.datetime.now(), ... if __name__ == '__main__': import sys Command().handle(*sys.argv[1:]) # ... rest of the code ...
43b00bdb18131c49a6e52d752aeb0549298d8cda
avena/tests/test-image.py
avena/tests/test-image.py
from numpy import all, array, dstack from .. import image def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) if __name__ == '__main__': pass
from numpy import all, allclose, array, dstack from os import remove from os.path import sep, split from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) def test_read_save(): f = split(__file__)[0] + sep + 'drink.png' x = image.read(f) tmp = utils.rand_filename(f) try: image.save(x, tmp) y = image.read(tmp) assert allclose(x, y, rtol=1e-4, atol=1e-1) finally: remove(tmp) if __name__ == '__main__': pass
Add more unit tests for the image module.
Add more unit tests for the image module.
Python
isc
eliteraspberries/avena
- from numpy import all, array, dstack + from numpy import all, allclose, array, dstack + from os import remove + from os.path import sep, split - from .. import image + from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) + def test_read_save(): + f = split(__file__)[0] + sep + 'drink.png' + x = image.read(f) + tmp = utils.rand_filename(f) + try: + image.save(x, tmp) + y = image.read(tmp) + assert allclose(x, y, rtol=1e-4, atol=1e-1) + finally: + remove(tmp) + + if __name__ == '__main__': pass
Add more unit tests for the image module.
## Code Before: from numpy import all, array, dstack from .. import image def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) if __name__ == '__main__': pass ## Instruction: Add more unit tests for the image module. ## Code After: from numpy import all, allclose, array, dstack from os import remove from os.path import sep, split from .. import image, utils def test_get_channels(): x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) for z in image.get_channels(y): assert all(z == x) def test_map_to_channels(): def f(x): return x + 1 x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]]) y = dstack((x, x, x)) z = image.map_to_channels( f, lambda shape: shape, y, ) assert all(z == y + 1) def test_read_save(): f = split(__file__)[0] + sep + 'drink.png' x = image.read(f) tmp = utils.rand_filename(f) try: image.save(x, tmp) y = image.read(tmp) assert allclose(x, y, rtol=1e-4, atol=1e-1) finally: remove(tmp) if __name__ == '__main__': pass
# ... existing code ... from numpy import all, allclose, array, dstack from os import remove from os.path import sep, split from .. import image, utils # ... modified code ... def test_read_save(): f = split(__file__)[0] + sep + 'drink.png' x = image.read(f) tmp = utils.rand_filename(f) try: image.save(x, tmp) y = image.read(tmp) assert allclose(x, y, rtol=1e-4, atol=1e-1) finally: remove(tmp) if __name__ == '__main__': # ... rest of the code ...
1d6fa0521b0fbba48ddbc231614b7074a63488c2
tests/utils.py
tests/utils.py
import os import sys from config import * def addLocalPaths(paths): for path_part in paths: base_path = os.path.join(local_path, path_part) abs_path = os.path.abspath(base_path) print "importing " + abs_path sys.path.insert(0, abs_path)
import os import sys from config import * def addLocalPaths(paths): for path_part in paths: base_path = os.path.join(local_path, path_part) abs_path = os.path.abspath(base_path) sys.path.insert(0, abs_path)
Remove debug messages from import.
Remove debug messages from import.
Python
mpl-2.0
EsriOceans/btm
import os import sys from config import * def addLocalPaths(paths): for path_part in paths: base_path = os.path.join(local_path, path_part) abs_path = os.path.abspath(base_path) - print "importing " + abs_path sys.path.insert(0, abs_path) -
Remove debug messages from import.
## Code Before: import os import sys from config import * def addLocalPaths(paths): for path_part in paths: base_path = os.path.join(local_path, path_part) abs_path = os.path.abspath(base_path) print "importing " + abs_path sys.path.insert(0, abs_path) ## Instruction: Remove debug messages from import. ## Code After: import os import sys from config import * def addLocalPaths(paths): for path_part in paths: base_path = os.path.join(local_path, path_part) abs_path = os.path.abspath(base_path) sys.path.insert(0, abs_path)
# ... existing code ... abs_path = os.path.abspath(base_path) sys.path.insert(0, abs_path) # ... rest of the code ...
f2bf7807754d13c92bd2901072dd804dda61805f
cla_public/apps/contact/constants.py
cla_public/apps/contact/constants.py
"Contact constants" from flask.ext.babel import lazy_gettext as _ DAY_TODAY = 'today' DAY_SPECIFIC = 'specific_day' DAY_CHOICES = ( (DAY_TODAY, _('Call me today at')), (DAY_SPECIFIC, _('Call me in the next week on')) )
"Contact constants" from flask.ext.babel import lazy_gettext as _ DAY_TODAY = 'today' DAY_SPECIFIC = 'specific_day' DAY_CHOICES = ( (DAY_TODAY, _('Call me today at')), (DAY_SPECIFIC, _('Call me in on')) )
Update button label (call back time picker)
FE: Update button label (call back time picker)
Python
mit
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
"Contact constants" from flask.ext.babel import lazy_gettext as _ DAY_TODAY = 'today' DAY_SPECIFIC = 'specific_day' DAY_CHOICES = ( (DAY_TODAY, _('Call me today at')), - (DAY_SPECIFIC, _('Call me in the next week on')) + (DAY_SPECIFIC, _('Call me in on')) )
Update button label (call back time picker)
## Code Before: "Contact constants" from flask.ext.babel import lazy_gettext as _ DAY_TODAY = 'today' DAY_SPECIFIC = 'specific_day' DAY_CHOICES = ( (DAY_TODAY, _('Call me today at')), (DAY_SPECIFIC, _('Call me in the next week on')) ) ## Instruction: Update button label (call back time picker) ## Code After: "Contact constants" from flask.ext.babel import lazy_gettext as _ DAY_TODAY = 'today' DAY_SPECIFIC = 'specific_day' DAY_CHOICES = ( (DAY_TODAY, _('Call me today at')), (DAY_SPECIFIC, _('Call me in on')) )
... (DAY_TODAY, _('Call me today at')), (DAY_SPECIFIC, _('Call me in on')) ) ...
21e5ee2ad250c313ea0eb2c67c3d3cc32661d24d
examples/benchmarking/server.py
examples/benchmarking/server.py
import asyncore,time,signal,sys from secure_smtpd import SMTPServer, FakeCredentialValidator class SecureSMTPServer(SMTPServer): def __init__(self): pass def process_message(self, peer, mailfrom, rcpttos, message_data): pass def start(self): SMTPServer.__init__( self, ('0.0.0.0', 25), None ) asyncore.loop() server = SecureSMTPServer() server.start() # normal termination of this process will kill worker children in # process pool so this process (the parent) needs to idle here waiting # for termination signal. If you don't have a signal handler, then # Python multiprocess cleanup stuff doesn't happen, and children won't # get killed by sending SIGTERM to parent. def sig_handler(signal,frame): print "Got signal %s, shutting down." % signal sys.exit(0) signal.signal(signal.SIGTERM, sig_handler) while 1: time.sleep(1)
from secure_smtpd import SMTPServer class SecureSMTPServer(SMTPServer): def process_message(self, peer, mailfrom, rcpttos, message_data): pass server = SecureSMTPServer(('0.0.0.0', 1025), None) server.run()
Switch to non-privledged port to make testing easier.
Switch to non-privledged port to make testing easier. User the new server.run() method.
Python
isc
bcoe/secure-smtpd
+ from secure_smtpd import SMTPServer - import asyncore,time,signal,sys - from secure_smtpd import SMTPServer, FakeCredentialValidator class SecureSMTPServer(SMTPServer): - - def __init__(self): - pass - def process_message(self, peer, mailfrom, rcpttos, message_data): pass + server = SecureSMTPServer(('0.0.0.0', 1025), None) - def start(self): - SMTPServer.__init__( - self, - ('0.0.0.0', 25), - None - ) - asyncore.loop() - - server = SecureSMTPServer() - server.start() + server.run() - - # normal termination of this process will kill worker children in - # process pool so this process (the parent) needs to idle here waiting - # for termination signal. If you don't have a signal handler, then - # Python multiprocess cleanup stuff doesn't happen, and children won't - # get killed by sending SIGTERM to parent. - - def sig_handler(signal,frame): - print "Got signal %s, shutting down." % signal - sys.exit(0) - - signal.signal(signal.SIGTERM, sig_handler) - - while 1: - time.sleep(1) -
Switch to non-privledged port to make testing easier.
## Code Before: import asyncore,time,signal,sys from secure_smtpd import SMTPServer, FakeCredentialValidator class SecureSMTPServer(SMTPServer): def __init__(self): pass def process_message(self, peer, mailfrom, rcpttos, message_data): pass def start(self): SMTPServer.__init__( self, ('0.0.0.0', 25), None ) asyncore.loop() server = SecureSMTPServer() server.start() # normal termination of this process will kill worker children in # process pool so this process (the parent) needs to idle here waiting # for termination signal. If you don't have a signal handler, then # Python multiprocess cleanup stuff doesn't happen, and children won't # get killed by sending SIGTERM to parent. def sig_handler(signal,frame): print "Got signal %s, shutting down." % signal sys.exit(0) signal.signal(signal.SIGTERM, sig_handler) while 1: time.sleep(1) ## Instruction: Switch to non-privledged port to make testing easier. ## Code After: from secure_smtpd import SMTPServer class SecureSMTPServer(SMTPServer): def process_message(self, peer, mailfrom, rcpttos, message_data): pass server = SecureSMTPServer(('0.0.0.0', 1025), None) server.run()
// ... existing code ... from secure_smtpd import SMTPServer // ... modified code ... class SecureSMTPServer(SMTPServer): def process_message(self, peer, mailfrom, rcpttos, message_data): ... server = SecureSMTPServer(('0.0.0.0', 1025), None) server.run() // ... rest of the code ...
82eb7a69ccb88d27141aeb483e4482041108723f
app/Display/display.py
app/Display/display.py
import sys ESC = chr(27) CLEAR = ESC + "[2J" MOVE_HOME = ESC + "[H" ERASE = CLEAR + MOVE_HOME LINES = 24 COLS = 80 class Display: def __init__(self, title): self.title = title def clear(self): sys.stdout.write(ERASE) def show_properties(self, properties, names=None): if names is None: names = properties.keys() max_len = max(map(len, names)) self.clear() self.print(self.title) print() for k in names: self.print("{0}: {1}".format(k.rjust(max_len), properties[k])) def print(self, message): print(message, end="\x0a\x0d")
import sys ESC = chr(27) CSI = ESC + "[" CLEAR = CSI + "2J" MOVE_HOME = CSI + "H" ERASE = CLEAR + MOVE_HOME MOVE_TO = CSI + "{0};{1}H" LINES = 24 COLS = 80 class Display: def __init__(self, title, info=None): self.title = title self.info = info def clear(self): sys.stdout.write(ERASE) sys.stdout.flush() def move_to(self, row, col): sys.stdout.write(MOVE_TO.format(row, col)) sys.stdout.flush() def show_properties(self, properties, names=None): if names is None: names = properties.keys() max_len = max(map(len, names)) self.clear() self.print(self.title.center(COLS)) print() for k in names: self.print("{0}: {1}".format(k.rjust(max_len), properties[k])) if self.info is not None: self.move_to(LINES, 0) sys.stdout.write(self.info) sys.stdout.flush() self.move_to(LINES, 0) def print(self, message): print(message, end="\x0a\x0d")
Add support for cursor position, centered title, and an info bar
Add support for cursor position, centered title, and an info bar
Python
mit
gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
import sys ESC = chr(27) + CSI = ESC + "[" - CLEAR = ESC + "[2J" + CLEAR = CSI + "2J" - MOVE_HOME = ESC + "[H" + MOVE_HOME = CSI + "H" ERASE = CLEAR + MOVE_HOME + MOVE_TO = CSI + "{0};{1}H" LINES = 24 COLS = 80 class Display: - def __init__(self, title): + def __init__(self, title, info=None): self.title = title + self.info = info def clear(self): sys.stdout.write(ERASE) + sys.stdout.flush() + + def move_to(self, row, col): + sys.stdout.write(MOVE_TO.format(row, col)) + sys.stdout.flush() def show_properties(self, properties, names=None): if names is None: names = properties.keys() max_len = max(map(len, names)) self.clear() - self.print(self.title) + self.print(self.title.center(COLS)) print() for k in names: self.print("{0}: {1}".format(k.rjust(max_len), properties[k])) + if self.info is not None: + self.move_to(LINES, 0) + sys.stdout.write(self.info) + sys.stdout.flush() + + self.move_to(LINES, 0) + def print(self, message): print(message, end="\x0a\x0d")
Add support for cursor position, centered title, and an info bar
## Code Before:
import sys

ESC = chr(27)
CLEAR = ESC + "[2J"
MOVE_HOME = ESC + "[H"
ERASE = CLEAR + MOVE_HOME

LINES = 24
COLS = 80


class Display:
    def __init__(self, title):
        self.title = title

    def clear(self):
        sys.stdout.write(ERASE)

    def show_properties(self, properties, names=None):
        if names is None:
            names = properties.keys()

        max_len = max(map(len, names))

        self.clear()
        self.print(self.title)
        print()

        for k in names:
            self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))

    def print(self, message):
        print(message, end="\x0a\x0d")
## Instruction:
Add support for cursor position, centered title, and an info bar
## Code After:
import sys

ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"

LINES = 24
COLS = 80


class Display:
    def __init__(self, title, info=None):
        self.title = title
        self.info = info

    def clear(self):
        sys.stdout.write(ERASE)
        sys.stdout.flush()

    def move_to(self, row, col):
        sys.stdout.write(MOVE_TO.format(row, col))
        sys.stdout.flush()

    def show_properties(self, properties, names=None):
        if names is None:
            names = properties.keys()

        max_len = max(map(len, names))

        self.clear()
        self.print(self.title.center(COLS))
        print()

        for k in names:
            self.print("{0}: {1}".format(k.rjust(max_len), properties[k]))

        if self.info is not None:
            self.move_to(LINES, 0)
            sys.stdout.write(self.info)
            sys.stdout.flush()

        self.move_to(LINES, 0)

    def print(self, message):
        print(message, end="\x0a\x0d")
...
ESC = chr(27)
CSI = ESC + "["
CLEAR = CSI + "2J"
MOVE_HOME = CSI + "H"
ERASE = CLEAR + MOVE_HOME
MOVE_TO = CSI + "{0};{1}H"
...
class Display:
    def __init__(self, title, info=None):
        self.title = title
        self.info = info
...
        sys.stdout.write(ERASE)
        sys.stdout.flush()

    def move_to(self, row, col):
        sys.stdout.write(MOVE_TO.format(row, col))
        sys.stdout.flush()
...
        self.clear()
        self.print(self.title.center(COLS))
        print()
...
        if self.info is not None:
            self.move_to(LINES, 0)
            sys.stdout.write(self.info)
            sys.stdout.flush()

        self.move_to(LINES, 0)

    def print(self, message):
...
3a74774a42521f4b68e484855d103495438095c3
examples/schema/targetinfo.py
examples/schema/targetinfo.py
import jsl


class TargetInfo(jsl.Document):
    docker = jsl.ArrayField(jsl.StringField(), max_items=2)
    rsync = jsl.ArrayField(jsl.StringField(), max_items=2)
    containers = jsl.ArrayField([
        jsl.StringField(),
        jsl.ArrayField(jsl.StringField())
    ])
import jsl


class TargetInfo(jsl.Document):
    docker = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    rsync = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    containers = jsl.ArrayField([
        jsl.StringField(),
        jsl.ArrayField(jsl.StringField())
    ])
Correct the target info schema: docker and rsync messages are Null in case of success. Suggested by @vinzenz and corrected by @artmello.
Correct the target info schema: docker and rsync messages are Null in case of success. Suggested by @vinzenz and corrected by @artmello.
Python
apache-2.0
leapp-to/snactor
  import jsl


  class TargetInfo(jsl.Document):
-     docker = jsl.ArrayField(jsl.StringField(), max_items=2)
-     rsync = jsl.ArrayField(jsl.StringField(), max_items=2)
+     docker = jsl.ArrayField([
+         jsl.StringField(),
+         jsl.OneOfField([jsl.StringField(), jsl.NullField()])
+     ])
+     rsync = jsl.ArrayField([
+         jsl.StringField(),
+         jsl.OneOfField([jsl.StringField(), jsl.NullField()])
+     ])
      containers = jsl.ArrayField([
          jsl.StringField(),
          jsl.ArrayField(jsl.StringField())
      ])
Correct the target info schema: docker and rsync messages are Null in case of success. Suggested by @vinzenz and corrected by @artmello.
## Code Before:
import jsl


class TargetInfo(jsl.Document):
    docker = jsl.ArrayField(jsl.StringField(), max_items=2)
    rsync = jsl.ArrayField(jsl.StringField(), max_items=2)
    containers = jsl.ArrayField([
        jsl.StringField(),
        jsl.ArrayField(jsl.StringField())
    ])
## Instruction:
Correct the target info schema: docker and rsync messages are Null in case of success. Suggested by @vinzenz and corrected by @artmello.
## Code After:
import jsl


class TargetInfo(jsl.Document):
    docker = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    rsync = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    containers = jsl.ArrayField([
        jsl.StringField(),
        jsl.ArrayField(jsl.StringField())
    ])
...
class TargetInfo(jsl.Document):
    docker = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    rsync = jsl.ArrayField([
        jsl.StringField(),
        jsl.OneOfField([jsl.StringField(), jsl.NullField()])
    ])
    containers = jsl.ArrayField([
...
8b4b5705907e1ec5f9dd3148560dc1bf4cd5b9b7
bin/detail/get_nmake_environment.py
bin/detail/get_nmake_environment.py
import detail.util
import os
import sys


def get(arch, vs_version):
    vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
    vs_path = os.getenv(vs_path_env)
    if not vs_path:
        sys.exit(
            'Environment variable {} is empty, '
            'looks like Visual Studio {} is not installed'.format(
                vs_path_env, vs_version
            )
        )
    vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
    if not os.path.isdir(vcvarsall_dir):
        sys.exit(
            'Directory `{}` not exists '
            '({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
    if not os.path.isfile(vcvarsall_path):
        sys.exit(
            'File vcvarsall.bat not found in directory '
            '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
import detail.util
import os
import sys


def get(arch, vs_version):
    vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
    vs_path = os.getenv(vs_path_env)
    if not vs_path:
        sys.exit(
            'Environment variable {} is empty, '
            'looks like Visual Studio {} is not installed'.format(
                vs_path_env, vs_version
            )
        )

    if vs_version == '15':
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
    else:
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')

    if not os.path.isdir(vcvarsall_dir):
        sys.exit(
            'Directory `{}` not exists '
            '({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
    if not os.path.isfile(vcvarsall_path):
        sys.exit(
            'File vcvarsall.bat not found in directory '
            '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
Fix vcvarsall_dir for Visual Studio 2017
polly.py: Fix vcvarsall_dir for Visual Studio 2017 [skip ci]
Python
bsd-2-clause
idscan/polly,idscan/polly,ruslo/polly,ruslo/polly
  import detail.util
  import os
  import sys


  def get(arch, vs_version):
      vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
      vs_path = os.getenv(vs_path_env)
      if not vs_path:
          sys.exit(
              'Environment variable {} is empty, '
              'looks like Visual Studio {} is not installed'.format(
                  vs_path_env, vs_version
              )
          )
+
+     if vs_version == '15':
+         vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
+     else:
-     vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
+         vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
+
      if not os.path.isdir(vcvarsall_dir):
          sys.exit(
              'Directory `{}` not exists '
              '({} environment variable)'.format(vcvarsall_dir, vs_path_env)
          )
      vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
      if not os.path.isfile(vcvarsall_path):
          sys.exit(
              'File vcvarsall.bat not found in directory '
              '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
          )
      return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
Fix vcvarsall_dir for Visual Studio 2017
## Code Before:
import detail.util
import os
import sys


def get(arch, vs_version):
    vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
    vs_path = os.getenv(vs_path_env)
    if not vs_path:
        sys.exit(
            'Environment variable {} is empty, '
            'looks like Visual Studio {} is not installed'.format(
                vs_path_env, vs_version
            )
        )
    vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')
    if not os.path.isdir(vcvarsall_dir):
        sys.exit(
            'Directory `{}` not exists '
            '({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
    if not os.path.isfile(vcvarsall_path):
        sys.exit(
            'File vcvarsall.bat not found in directory '
            '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
## Instruction:
Fix vcvarsall_dir for Visual Studio 2017
## Code After:
import detail.util
import os
import sys


def get(arch, vs_version):
    vs_path_env = 'VS{}0COMNTOOLS'.format(vs_version)
    vs_path = os.getenv(vs_path_env)
    if not vs_path:
        sys.exit(
            'Environment variable {} is empty, '
            'looks like Visual Studio {} is not installed'.format(
                vs_path_env, vs_version
            )
        )

    if vs_version == '15':
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
    else:
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')

    if not os.path.isdir(vcvarsall_dir):
        sys.exit(
            'Directory `{}` not exists '
            '({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    vcvarsall_path = os.path.join(vcvarsall_dir, 'vcvarsall.bat')
    if not os.path.isfile(vcvarsall_path):
        sys.exit(
            'File vcvarsall.bat not found in directory '
            '`{}` ({} environment variable)'.format(vcvarsall_dir, vs_path_env)
        )
    return detail.util.get_environment_from_batch_command([vcvarsall_path, arch])
// ... existing code ...
        )

    if vs_version == '15':
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC', 'Auxiliary', 'Build')
    else:
        vcvarsall_dir = os.path.join(vs_path, '..', '..', 'VC')

    if not os.path.isdir(vcvarsall_dir):
// ... rest of the code ...
ddb3bcf4e5d5eb5dc4f8bb74313f333e54c385d6
scripts/wall_stop.py
scripts/wall_stop.py
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues

class WallStop():
    def __init__(self):
        self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)

        self.sensor_values = LightSensorValues()
        rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)

    def callback_lightsensors(self,messages):
        self.sensor_values = messages

    def run(self):
        rate = rospy.Rate(10)
        data = Twist()

        while not rospy.is_shutdown():
            data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
            self.cmd_vel.publish(data)
            rate.sleep()

if __name__ == '__main__':
    rospy.init_node('wall_stop')

    rospy.wait_for_service('/motor_on')
    rospy.wait_for_service('/motor_off')
    rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
    rospy.ServiceProxy('/motor_on',Trigger).call()

    WallStop().run()
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues

class WallStop():
    def __init__(self):
        self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)

        self.sensor_values = LightSensorValues()
        rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)

    def callback(self,messages):
        self.sensor_values = messages

    def run(self):
        rate = rospy.Rate(10)
        data = Twist()

        while not rospy.is_shutdown():
            data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
            self.cmd_vel.publish(data)
            rate.sleep()

if __name__ == '__main__':
    rospy.init_node('wall_stop')

    rospy.wait_for_service('/motor_on')
    rospy.wait_for_service('/motor_off')
    rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
    rospy.ServiceProxy('/motor_on',Trigger).call()

    WallStop().run()
Shorten the name of a function
Shorten the name of a function
Python
mit
citueda/pimouse_run_corridor,citueda/pimouse_run_corridor
  import rospy,copy
  from geometry_msgs.msg import Twist
  from std_srvs.srv import Trigger, TriggerResponse
  from pimouse_ros.msg import LightSensorValues

  class WallStop():
      def __init__(self):
          self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)

          self.sensor_values = LightSensorValues()
-         rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)
+         rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)

-     def callback_lightsensors(self,messages):
+     def callback(self,messages):
          self.sensor_values = messages

      def run(self):
          rate = rospy.Rate(10)
          data = Twist()

          while not rospy.is_shutdown():
              data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
              self.cmd_vel.publish(data)
              rate.sleep()

  if __name__ == '__main__':
      rospy.init_node('wall_stop')

      rospy.wait_for_service('/motor_on')
      rospy.wait_for_service('/motor_off')
      rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
      rospy.ServiceProxy('/motor_on',Trigger).call()

      WallStop().run()
Shorten the name of a function
## Code Before:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues

class WallStop():
    def __init__(self):
        self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)

        self.sensor_values = LightSensorValues()
        rospy.Subscriber('/lightsensors', LightSensorValues, self.callback_lightsensors)

    def callback_lightsensors(self,messages):
        self.sensor_values = messages

    def run(self):
        rate = rospy.Rate(10)
        data = Twist()

        while not rospy.is_shutdown():
            data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
            self.cmd_vel.publish(data)
            rate.sleep()

if __name__ == '__main__':
    rospy.init_node('wall_stop')

    rospy.wait_for_service('/motor_on')
    rospy.wait_for_service('/motor_off')
    rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
    rospy.ServiceProxy('/motor_on',Trigger).call()

    WallStop().run()
## Instruction:
Shorten the name of a function
## Code After:
import rospy,copy
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
from pimouse_ros.msg import LightSensorValues

class WallStop():
    def __init__(self):
        self.cmd_vel = rospy.Publisher('/cmd_vel',Twist,queue_size=1)

        self.sensor_values = LightSensorValues()
        rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)

    def callback(self,messages):
        self.sensor_values = messages

    def run(self):
        rate = rospy.Rate(10)
        data = Twist()

        while not rospy.is_shutdown():
            data.linear.x = 0.2 if self.sensor_values.sum_all < 500 else 0.0
            self.cmd_vel.publish(data)
            rate.sleep()

if __name__ == '__main__':
    rospy.init_node('wall_stop')

    rospy.wait_for_service('/motor_on')
    rospy.wait_for_service('/motor_off')
    rospy.on_shutdown(rospy.ServiceProxy('/motor_off',Trigger).call)
    rospy.ServiceProxy('/motor_on',Trigger).call()

    WallStop().run()
// ... existing code ...
        self.sensor_values = LightSensorValues()
        rospy.Subscriber('/lightsensors', LightSensorValues, self.callback)

    def callback(self,messages):
        self.sensor_values = messages
// ... rest of the code ...
cc8f0760aa5497d2285dc85c6f3c17c6ce327c35
core/__init__.py
core/__init__.py
import logging

try:
    from google.appengine.datastore import datastore_rpc
    from google.appengine.datastore import datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
    logging.warning('Importing local datastore_{rpc,query}')
    from . import datastore_rpc
    from . import datastore_query

from . import monkey
import logging
import sys

try:
    from google.appengine.datastore import datastore_rpc
    from google.appengine.datastore import datastore_query
    sys.modules['core.datastore_rpc'] = datastore_rpc
    sys.modules['core.datastore_query'] = datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
    logging.warning('Importing local datastore_{rpc,query}')
    from . import datastore_rpc
    from . import datastore_query

from . import monkey
Make official google imports actually work.
Make official google imports actually work.
Python
apache-2.0
GoogleCloudPlatform/datastore-ndb-python,GoogleCloudPlatform/datastore-ndb-python
  import logging
+ import sys

  try:
      from google.appengine.datastore import datastore_rpc
      from google.appengine.datastore import datastore_query
+     sys.modules['core.datastore_rpc'] = datastore_rpc
+     sys.modules['core.datastore_query'] = datastore_query
      logging.info('Imported official google datastore_{rpc,query}')
  except ImportError:
      logging.warning('Importing local datastore_{rpc,query}')
      from . import datastore_rpc
      from . import datastore_query

  from . import monkey
Make official google imports actually work.
## Code Before:
import logging

try:
    from google.appengine.datastore import datastore_rpc
    from google.appengine.datastore import datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
    logging.warning('Importing local datastore_{rpc,query}')
    from . import datastore_rpc
    from . import datastore_query

from . import monkey
## Instruction:
Make official google imports actually work.
## Code After:
import logging
import sys

try:
    from google.appengine.datastore import datastore_rpc
    from google.appengine.datastore import datastore_query
    sys.modules['core.datastore_rpc'] = datastore_rpc
    sys.modules['core.datastore_query'] = datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
except ImportError:
    logging.warning('Importing local datastore_{rpc,query}')
    from . import datastore_rpc
    from . import datastore_query

from . import monkey
# ... existing code ...
import logging
import sys
# ... modified code ...
    from google.appengine.datastore import datastore_query
    sys.modules['core.datastore_rpc'] = datastore_rpc
    sys.modules['core.datastore_query'] = datastore_query
    logging.info('Imported official google datastore_{rpc,query}')
# ... rest of the code ...
ef89d3608b9ab54aef105528f2c15fa9cc437bcd
runtests.py
runtests.py
import sys
from os.path import abspath, dirname

from django.conf import settings


sys.path.insert(0, abspath(dirname(__file__)))


if not settings.configured:
    settings.configure(
        INSTALLED_APPS=(
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.auth',
            'django.contrib.admin',
            'email_log',
            'email_log.tests',
        ),
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
        ROOT_URLCONF='email_log.tests.urls',
    )


def runtests():
    from django.test.simple import DjangoTestSuiteRunner
    failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
    sys.exit(failures)


if __name__ == "__main__":
    runtests()
import sys
from os.path import abspath, dirname

from django.conf import settings
import django


sys.path.insert(0, abspath(dirname(__file__)))


if not settings.configured:
    settings.configure(
        INSTALLED_APPS=(
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.auth',
            'django.contrib.admin',
            'email_log',
            'email_log.tests',
        ),
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
        ROOT_URLCONF='email_log.tests.urls',
    )


def runtests():
    if hasattr(django, 'setup'):
        django.setup()
    try:
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ['email_log.tests']
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ['tests']

    failures = runner_class(failfast=False).run_tests(test_args)
    sys.exit(failures)


if __name__ == "__main__":
    runtests()
Fix tests for Django 1.7
Fix tests for Django 1.7
Python
mit
treyhunner/django-email-log,treyhunner/django-email-log
  import sys
  from os.path import abspath, dirname

  from django.conf import settings
+ import django


  sys.path.insert(0, abspath(dirname(__file__)))


  if not settings.configured:
      settings.configure(
          INSTALLED_APPS=(
              'django.contrib.contenttypes',
              'django.contrib.sessions',
              'django.contrib.auth',
              'django.contrib.admin',
              'email_log',
              'email_log.tests',
          ),
          DATABASES={
              'default': {
                  'ENGINE': 'django.db.backends.sqlite3',
              }
          },
          EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
          ROOT_URLCONF='email_log.tests.urls',
      )


  def runtests():
+     if hasattr(django, 'setup'):
+         django.setup()
+     try:
+         from django.test.runner import DiscoverRunner
+         runner_class = DiscoverRunner
+         test_args = ['email_log.tests']
+     except ImportError:
-     from django.test.simple import DjangoTestSuiteRunner
+         from django.test.simple import DjangoTestSuiteRunner
+         runner_class = DjangoTestSuiteRunner
+         test_args = ['tests']
+
-     failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
+     failures = runner_class(failfast=False).run_tests(test_args)
      sys.exit(failures)


  if __name__ == "__main__":
      runtests()
Fix tests for Django 1.7
## Code Before:
import sys
from os.path import abspath, dirname

from django.conf import settings


sys.path.insert(0, abspath(dirname(__file__)))


if not settings.configured:
    settings.configure(
        INSTALLED_APPS=(
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.auth',
            'django.contrib.admin',
            'email_log',
            'email_log.tests',
        ),
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
        ROOT_URLCONF='email_log.tests.urls',
    )


def runtests():
    from django.test.simple import DjangoTestSuiteRunner
    failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
    sys.exit(failures)


if __name__ == "__main__":
    runtests()
## Instruction:
Fix tests for Django 1.7
## Code After:
import sys
from os.path import abspath, dirname

from django.conf import settings
import django


sys.path.insert(0, abspath(dirname(__file__)))


if not settings.configured:
    settings.configure(
        INSTALLED_APPS=(
            'django.contrib.contenttypes',
            'django.contrib.sessions',
            'django.contrib.auth',
            'django.contrib.admin',
            'email_log',
            'email_log.tests',
        ),
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
            }
        },
        EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
        ROOT_URLCONF='email_log.tests.urls',
    )


def runtests():
    if hasattr(django, 'setup'):
        django.setup()
    try:
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ['email_log.tests']
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ['tests']

    failures = runner_class(failfast=False).run_tests(test_args)
    sys.exit(failures)


if __name__ == "__main__":
    runtests()
...
from django.conf import settings
import django
...
def runtests():
    if hasattr(django, 'setup'):
        django.setup()
    try:
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ['email_log.tests']
    except ImportError:
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ['tests']

    failures = runner_class(failfast=False).run_tests(test_args)
    sys.exit(failures)
...
f4cfad2edaa896b471f4f44b2a3fda2bd6b1bb49
tests/conftest.py
tests/conftest.py
import pytest

from flask import Flask, jsonify


@pytest.fixture
def app():
    app = Flask(__name__)

    @app.route('/ping')
    def ping():
        return jsonify(ping='pong')

    return app
import pytest

from flask import Flask, jsonify


@pytest.fixture
def app():
    app = Flask(__name__)

    @app.route('/')
    def index():
        return app.response_class('OK')

    @app.route('/ping')
    def ping():
        return jsonify(ping='pong')

    return app
Add index route to test application
Add index route to test application

This endpoint is used to start the :class:`LiveServer` instance with a
minimal waiting timeout.
Python
mit
amateja/pytest-flask
  import pytest

  from flask import Flask, jsonify


  @pytest.fixture
  def app():
      app = Flask(__name__)

+     @app.route('/')
+     def index():
+         return app.response_class('OK')
+
      @app.route('/ping')
      def ping():
          return jsonify(ping='pong')

      return app
Add index route to test application
## Code Before:
import pytest

from flask import Flask, jsonify


@pytest.fixture
def app():
    app = Flask(__name__)

    @app.route('/ping')
    def ping():
        return jsonify(ping='pong')

    return app
## Instruction:
Add index route to test application
## Code After:
import pytest

from flask import Flask, jsonify


@pytest.fixture
def app():
    app = Flask(__name__)

    @app.route('/')
    def index():
        return app.response_class('OK')

    @app.route('/ping')
    def ping():
        return jsonify(ping='pong')

    return app
// ... existing code ...
    @app.route('/')
    def index():
        return app.response_class('OK')

    @app.route('/ping')
// ... rest of the code ...
ec884c9db173f093d1398de54d00f1c36f22d8e4
examples/random_valid_test_generator.py
examples/random_valid_test_generator.py
import sys
import time
from random import shuffle

from FairDistributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    number_of_targets = int(sys.argv[1])
    number_of_objects = int(sys.argv[2])

    # Generate dummy lists for objects, targets and dummy matrix for weights
    targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)]
    objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)]
    dummy_weights = list(range(1, number_of_objects+1))
    weights_matrix = list()
    for _ in range(number_of_targets):
        new_random_weight_list = list(dummy_weights)
        shuffle(new_random_weight_list)
        weights_matrix.append(new_random_weight_list)

    start_time = time.time()
    distributor = FairDistributor(targets, objects, weights_matrix)
    distributor.distribute()
    elapsed_time = time.time() - start_time

    print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
        number_of_targets, number_of_objects, elapsed_time))


if __name__ == '__main__':
    main()
import sys
import time
from random import shuffle

from vania.fair_distributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    number_of_targets = int(sys.argv[1])
    number_of_objects = int(sys.argv[2])

    # Generate dummy lists for objects, targets and dummy matrix for weights
    targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)]
    objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)]
    dummy_weights = list(range(1, number_of_objects+1))
    weights_matrix = list()
    for _ in range(number_of_targets):
        new_random_weight_list = list(dummy_weights)
        shuffle(new_random_weight_list)
        weights_matrix.append(new_random_weight_list)

    # Benchmark solver
    start_time = time.time()
    distributor = FairDistributor(targets, objects, weights_matrix)
    distributor.distribute()
    elapsed_time = time.time() - start_time

    # Output
    print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
        number_of_targets, number_of_objects, elapsed_time))


if __name__ == '__main__':
    main()
Reformat random generator code
Reformat random generator code
Python
mit
Hackathonners/vania
  import sys
  import time
  from random import shuffle

- from FairDistributor import FairDistributor
+ from vania.fair_distributor import FairDistributor


  def main():
      # User input for the number of targets and objects.
      number_of_targets = int(sys.argv[1])
      number_of_objects = int(sys.argv[2])
+
      # Generate dummy lists for objects, targets and dummy matrix for weights
      targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)]
      objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)]
      dummy_weights = list(range(1, number_of_objects+1))
      weights_matrix = list()
      for _ in range(number_of_targets):
          new_random_weight_list = list(dummy_weights)
          shuffle(new_random_weight_list)
          weights_matrix.append(new_random_weight_list)
+
+     # Benchmark solver
      start_time = time.time()
      distributor = FairDistributor(targets, objects, weights_matrix)
      distributor.distribute()
      elapsed_time = time.time() - start_time
+
+     # Output
      print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
          number_of_targets, number_of_objects, elapsed_time))


  if __name__ == '__main__':
      main()
Reformat random generator code
## Code Before:
import sys
import time
from random import shuffle

from FairDistributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    number_of_targets = int(sys.argv[1])
    number_of_objects = int(sys.argv[2])

    # Generate dummy lists for objects, targets and dummy matrix for weights
    targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)]
    objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)]
    dummy_weights = list(range(1, number_of_objects+1))
    weights_matrix = list()
    for _ in range(number_of_targets):
        new_random_weight_list = list(dummy_weights)
        shuffle(new_random_weight_list)
        weights_matrix.append(new_random_weight_list)

    start_time = time.time()
    distributor = FairDistributor(targets, objects, weights_matrix)
    distributor.distribute()
    elapsed_time = time.time() - start_time

    print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
        number_of_targets, number_of_objects, elapsed_time))


if __name__ == '__main__':
    main()
## Instruction:
Reformat random generator code
## Code After:
import sys
import time
from random import shuffle

from vania.fair_distributor import FairDistributor


def main():
    # User input for the number of targets and objects.
    number_of_targets = int(sys.argv[1])
    number_of_objects = int(sys.argv[2])

    # Generate dummy lists for objects, targets and dummy matrix for weights
    targets = ['target_{0}'.format(str(s)) for s in range(number_of_targets)]
    objects = ['object_{0}'.format(str(s)) for s in range(number_of_objects)]
    dummy_weights = list(range(1, number_of_objects+1))
    weights_matrix = list()
    for _ in range(number_of_targets):
        new_random_weight_list = list(dummy_weights)
        shuffle(new_random_weight_list)
        weights_matrix.append(new_random_weight_list)

    # Benchmark solver
    start_time = time.time()
    distributor = FairDistributor(targets, objects, weights_matrix)
    distributor.distribute()
    elapsed_time = time.time() - start_time

    # Output
    print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
        number_of_targets, number_of_objects, elapsed_time))


if __name__ == '__main__':
    main()
# ... existing code ...
from random import shuffle

from vania.fair_distributor import FairDistributor
# ... modified code ...
    number_of_objects = int(sys.argv[2])

    # Generate dummy lists for objects, targets and dummy matrix for weights
...
        weights_matrix.append(new_random_weight_list)

    # Benchmark solver
    start_time = time.time()
...
    elapsed_time = time.time() - start_time

    # Output
    print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
# ... rest of the code ...
168937c586b228c05ada2da79a55c9416c3180d3
antifuzz.py
antifuzz.py
'''
File: antifuzz.py

Authors: Kaitlin Keenan and Ryan Frank
'''

import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html

def main():

    # Take in file
    ogFile = sys.argv[1]

    # Make copy of file
    newFile = sys.argv[2]

    # Mess with the given file
    cmd(['lame','--quiet', '--scale', '1', ogFile])
    print cmd(['mv', ogFile + ".mp3", newFile])

    # Hash files
    ogHash = ssdeep.hash_from_file(ogFile)
    newHash = ssdeep.hash_from_file(newFile)

    # Compare the hashes
    #print ogHash
    print ssdeep.compare(ogHash, newHash)

def cmd(command):
    #if (arg2 && arg1):
    p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    out, err = p.communicate()
    return out

if __name__ == "__main__":
    main()
'''
File: antifuzz.py

Authors: Kaitlin Keenan and Ryan Frank
'''

import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
import argparse

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("originalFile", help="File to antifuzz")
    parser.add_argument("newFile", help="Name of the antifuzzed file")
    args = parser.parse_args()

    # Take in file
    ogFile = args.originalFile

    # Make copy of file
    nFile = args.newFile

    # Mess with the given file
    mp3(ogFile, nFile)

    # Hash files
    ogHash = ssdeep.hash_from_file(ogFile)
    newHash = ssdeep.hash_from_file(nFile)

    # Compare the hashes
    #print ogHash
    diff=str(ssdeep.compare(ogHash, newHash))
    print("The files are " + diff + "% different")

def mp3(ogFile, newFile):
    cmd(['lame','--quiet', '--scale', '1', ogFile])
    cmd(['mv', ogFile + ".mp3", newFile])

def cmd(command):
    #if (arg2 && arg1):
    p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    out, err = p.communicate()
    return out

if __name__ == "__main__":
    main()
Add help, make output more user friendly
Add help, make output more user friendly
Python
mit
ForensicTools/antifuzzyhashing-475-2161_Keenan_Frank
  '''
  File: antifuzz.py

  Authors: Kaitlin Keenan and Ryan Frank
  '''

  import sys
  from shutil import copy2
  import subprocess
  import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
+ import argparse

  def main():
+     parser = argparse.ArgumentParser()
+     parser.add_argument("originalFile", help="File to antifuzz")
+     parser.add_argument("newFile", help="Name of the antifuzzed file")
+     args = parser.parse_args()
+
      # Take in file
-     ogFile = sys.argv[1]
+     ogFile = args.originalFile

      # Make copy of file
-     newFile = sys.argv[2]
+     nFile = args.newFile

      # Mess with the given file
+     mp3(ogFile, nFile)
-     cmd(['lame','--quiet', '--scale', '1', ogFile])
-     print cmd(['mv', ogFile + ".mp3", newFile])

      # Hash files
      ogHash = ssdeep.hash_from_file(ogFile)
-     newHash = ssdeep.hash_from_file(newFile)
+     newHash = ssdeep.hash_from_file(nFile)

      # Compare the hashes
      #print ogHash
-     print ssdeep.compare(ogHash, newHash)
+     diff=str(ssdeep.compare(ogHash, newHash))
+     print("The files are " + diff + "% different")
+
+ def mp3(ogFile, newFile):
+     cmd(['lame','--quiet', '--scale', '1', ogFile])
+     cmd(['mv', ogFile + ".mp3", newFile])

  def cmd(command):
      #if (arg2 && arg1):
      p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
      out, err = p.communicate()
      return out

  if __name__ == "__main__":
      main()
Add help, make output more user friendly
## Code Before:
'''
File: antifuzz.py

Authors: Kaitlin Keenan and Ryan Frank
'''

import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html

def main():

    # Take in file
    ogFile = sys.argv[1]

    # Make copy of file
    newFile = sys.argv[2]

    # Mess with the given file
    cmd(['lame','--quiet', '--scale', '1', ogFile])
    print cmd(['mv', ogFile + ".mp3", newFile])

    # Hash files
    ogHash = ssdeep.hash_from_file(ogFile)
    newHash = ssdeep.hash_from_file(newFile)

    # Compare the hashes
    #print ogHash
    print ssdeep.compare(ogHash, newHash)

def cmd(command):
    #if (arg2 && arg1):
    p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    out, err = p.communicate()
    return out

if __name__ == "__main__":
    main()
## Instruction:
Add help, make output more user friendly
## Code After:
'''
File: antifuzz.py

Authors: Kaitlin Keenan and Ryan Frank
'''

import sys
from shutil import copy2
import subprocess
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
import argparse

def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("originalFile", help="File to antifuzz")
    parser.add_argument("newFile", help="Name of the antifuzzed file")
    args = parser.parse_args()

    # Take in file
    ogFile = args.originalFile

    # Make copy of file
    nFile = args.newFile

    # Mess with the given file
    mp3(ogFile, nFile)

    # Hash files
    ogHash = ssdeep.hash_from_file(ogFile)
    newHash = ssdeep.hash_from_file(nFile)

    # Compare the hashes
    #print ogHash
    diff=str(ssdeep.compare(ogHash, newHash))
    print("The files are " + diff + "% different")

def mp3(ogFile, newFile):
    cmd(['lame','--quiet', '--scale', '1', ogFile])
    cmd(['mv', ogFile + ".mp3", newFile])

def cmd(command):
    #if (arg2 && arg1):
    p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
    out, err = p.communicate()
    return out

if __name__ == "__main__":
    main()
// ... existing code ...
import ssdeep #http://python-ssdeep.readthedocs.io/en/latest/installation.html
import argparse
// ... modified code ...
    parser = argparse.ArgumentParser()
    parser.add_argument("originalFile", help="File to antifuzz")
    parser.add_argument("newFile", help="Name of the antifuzzed file")
    args = parser.parse_args()

    # Take in file
    ogFile = args.originalFile
...
    # Make copy of file
    nFile = args.newFile
...
    # Mess with the given file
    mp3(ogFile, nFile)
...
    ogHash = ssdeep.hash_from_file(ogFile)
    newHash = ssdeep.hash_from_file(nFile)
...
    #print ogHash
    diff=str(ssdeep.compare(ogHash, newHash))
    print("The files are " + diff + "% different")

def mp3(ogFile, newFile):
    cmd(['lame','--quiet', '--scale', '1', ogFile])
    cmd(['mv', ogFile + ".mp3", newFile])
// ... rest of the code ...
e0fa24595a60dd3c2ab5d1b64a76bae9ce3c05a8
testproject/testapp/tests/test_root.py
testproject/testapp/tests/test_root.py
from djet import assertions, restframework
from rest_framework import status

import djoser.constants
import djoser.utils
import djoser.views


class RootViewTest(restframework.APIViewTestCase,
                   assertions.StatusCodeAssertionsMixin):
    view_class = djoser.views.RootView

    def test_get_should_return_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, response.data)

    def test_all_urlpattern_names_are_in_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        for urlpattern_name in urlpattern_names:
            self.assertIn(urlpattern_name, response.data)
from djet import assertions, restframework
from rest_framework import status

import djoser.constants
import djoser.utils
import djoser.views


class RootViewTest(restframework.APIViewTestCase,
                   assertions.StatusCodeAssertionsMixin):
    view_class = djoser.views.RootView

    def test_get_should_return_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, response.data)

    def test_all_urlpattern_names_are_in_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        for urlpattern_name in urlpattern_names:
            self.assertIn(urlpattern_name, response.data)

    def test_non_existent_urlpattern_results_in_empty_string(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        urlpattern_names = ['non-existent-urlpattern']
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, {urlpattern_names[0]: ''})
Add test for non-existent URL pattern
Add test for non-existent URL pattern
Python
mit
akalipetis/djoser,sunscrapers/djoser,akalipetis/djoser,sunscrapers/djoser,sunscrapers/djoser
  from djet import assertions, restframework
  from rest_framework import status

  import djoser.constants
  import djoser.utils
  import djoser.views


  class RootViewTest(restframework.APIViewTestCase,
                     assertions.StatusCodeAssertionsMixin):
      view_class = djoser.views.RootView

      def test_get_should_return_urls_map(self):
          request = self.factory.get()
          view_object = self.create_view_object(request)

          response = view_object.dispatch(request)
          self.assert_status_equal(response, status.HTTP_200_OK)

          urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
          urls_map = view_object.get_urls_map(request, urlpattern_names, None)
          self.assertEquals(urls_map, response.data)

      def test_all_urlpattern_names_are_in_urls_map(self):
          request = self.factory.get()
          view_object = self.create_view_object(request)

          response = view_object.dispatch(request)
          self.assert_status_equal(response, status.HTTP_200_OK)

          urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
          for urlpattern_name in urlpattern_names:
              self.assertIn(urlpattern_name, response.data)
+
+     def test_non_existent_urlpattern_results_in_empty_string(self):
+         request = self.factory.get()
+         view_object = self.create_view_object(request)
+
+         urlpattern_names = ['non-existent-urlpattern']
+         urls_map = view_object.get_urls_map(request, urlpattern_names, None)
+         self.assertEquals(urls_map, {urlpattern_names[0]: ''})
Add test for non-existent URL pattern
## Code Before:
from djet import assertions, restframework
from rest_framework import status

import djoser.constants
import djoser.utils
import djoser.views


class RootViewTest(restframework.APIViewTestCase,
                   assertions.StatusCodeAssertionsMixin):
    view_class = djoser.views.RootView

    def test_get_should_return_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, response.data)

    def test_all_urlpattern_names_are_in_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        for urlpattern_name in urlpattern_names:
            self.assertIn(urlpattern_name, response.data)
## Instruction:
Add test for non-existent URL pattern
## Code After:
from djet import assertions, restframework
from rest_framework import status

import djoser.constants
import djoser.utils
import djoser.views


class RootViewTest(restframework.APIViewTestCase,
                   assertions.StatusCodeAssertionsMixin):
    view_class = djoser.views.RootView

    def test_get_should_return_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, response.data)

    def test_all_urlpattern_names_are_in_urls_map(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        response = view_object.dispatch(request)
        self.assert_status_equal(response, status.HTTP_200_OK)

        urlpattern_names = view_object.aggregate_djoser_urlpattern_names()
        for urlpattern_name in urlpattern_names:
            self.assertIn(urlpattern_name, response.data)

    def test_non_existent_urlpattern_results_in_empty_string(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        urlpattern_names = ['non-existent-urlpattern']
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, {urlpattern_names[0]: ''})
...
            self.assertIn(urlpattern_name, response.data)

    def test_non_existent_urlpattern_results_in_empty_string(self):
        request = self.factory.get()
        view_object = self.create_view_object(request)

        urlpattern_names = ['non-existent-urlpattern']
        urls_map = view_object.get_urls_map(request, urlpattern_names, None)
        self.assertEquals(urls_map, {urlpattern_names[0]: ''})
...
fa75cdb0114d86b626a77ea19897abd532fd4aeb
src/hack4lt/forms.py
src/hack4lt/forms.py
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _

from hack4lt.models import Hacker


class RegistrationForm(forms.ModelForm):
    class Meta:
        model = Hacker
        fields = ('username', 'first_name', 'last_name', 'email', 'repository',
                  'website', 'stackoverflow_user', 'description')


class LoginForm(forms.Form):
    username = forms.CharField(label=_('Username'), max_length=100)
    password = forms.CharField(label=_('Password'), max_length=128,
                               widget=forms.PasswordInput(render_value=False))

    def clean(self):
        cleaned_data = super(LoginForm, self).clean()
        if self.errors:
            return cleaned_data

        user = authenticate(**cleaned_data)
        if not user:
            raise forms.ValidationError(_('Username or password is incorrect'))
        cleaned_data['user'] = user
        return cleaned_data
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from django.forms.util import ErrorList

from hack4lt.models import Hacker


class RegistrationForm(forms.ModelForm):
    password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
                               widget=forms.PasswordInput(render_value=False))
    password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
                                      max_length=128, widget=forms.PasswordInput(render_value=False))

    class Meta:
        model = Hacker
        fields = ('username', 'password', 'password_repeat', 'first_name',
                  'last_name', 'email', 'repository', 'website',
                  'stackoverflow_user', 'description')

    def is_valid(self):
        valid = super(RegistrationForm, self).is_valid()
        if not valid:
            return valid

        first_password = self.cleaned_data.get('password')
        repeat_password = self.cleaned_data.get('password_repeat')

        if first_password == repeat_password:
            return True
        errors = self._errors.setdefault('password', ErrorList())
        errors.append(u'Passwords do not match')
        return False


class LoginForm(forms.Form):
    username = forms.CharField(label=_('Username'), max_length=100)
    password = forms.CharField(label=_('Password'), max_length=128,
                               widget=forms.PasswordInput(render_value=False))

    def clean(self):
        cleaned_data = super(LoginForm, self).clean()
        if self.errors:
            return cleaned_data

        user = authenticate(**cleaned_data)
        if not user:
            raise forms.ValidationError(_('Username or password is incorrect'))
        cleaned_data['user'] = user
        return cleaned_data
Add password and password_repeat fields to registration form.
Add password and password_repeat fields to registration form.
Python
bsd-3-clause
niekas/Hack4LT
  from django import forms
  from django.contrib.auth import authenticate
  from django.utils.translation import ugettext_lazy as _
+ from django.forms.util import ErrorList

  from hack4lt.models import Hacker


+ class RegistrationForm(forms.ModelForm):
+     password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
+                                widget=forms.PasswordInput(render_value=False))
+     password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
+                                       max_length=128, widget=forms.PasswordInput(render_value=False))

- class RegistrationForm(forms.ModelForm):
      class Meta:
          model = Hacker
-         fields = ('username', 'first_name', 'last_name', 'email', 'repository',
+         fields = ('username', 'password', 'password_repeat', 'first_name',
+                   'last_name', 'email', 'repository', 'website',
-                   'website', 'stackoverflow_user', 'description')
+                   'stackoverflow_user', 'description')
+
+     def is_valid(self):
+         valid = super(RegistrationForm, self).is_valid()
+         if not valid:
+             return valid
+
+         first_password = self.cleaned_data.get('password')
+         repeat_password = self.cleaned_data.get('password_repeat')
+
+         if first_password == repeat_password:
+             return True
+         errors = self._errors.setdefault('password', ErrorList())
+         errors.append(u'Passwords do not match')
+         return False


  class LoginForm(forms.Form):
      username = forms.CharField(label=_('Username'), max_length=100)
      password = forms.CharField(label=_('Password'), max_length=128,
                                 widget=forms.PasswordInput(render_value=False))

      def clean(self):
          cleaned_data = super(LoginForm, self).clean()
          if self.errors:
              return cleaned_data

          user = authenticate(**cleaned_data)
          if not user:
              raise forms.ValidationError(_('Username or password is incorrect'))
          cleaned_data['user'] = user
          return cleaned_data
Add password and password_repeat fields to registration form.
## Code Before:
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _

from hack4lt.models import Hacker


class RegistrationForm(forms.ModelForm):
    class Meta:
        model = Hacker
        fields = ('username', 'first_name', 'last_name', 'email', 'repository',
                  'website', 'stackoverflow_user', 'description')


class LoginForm(forms.Form):
    username = forms.CharField(label=_('Username'), max_length=100)
    password = forms.CharField(label=_('Password'), max_length=128,
                               widget=forms.PasswordInput(render_value=False))

    def clean(self):
        cleaned_data = super(LoginForm, self).clean()
        if self.errors:
            return cleaned_data

        user = authenticate(**cleaned_data)
        if not user:
            raise forms.ValidationError(_('Username or password is incorrect'))
        cleaned_data['user'] = user
        return cleaned_data
## Instruction:
Add password and password_repeat fields to registration form.
## Code After:
from django import forms
from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from django.forms.util import ErrorList

from hack4lt.models import Hacker


class RegistrationForm(forms.ModelForm):
    password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
                               widget=forms.PasswordInput(render_value=False))
    password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
                                      max_length=128, widget=forms.PasswordInput(render_value=False))

    class Meta:
        model = Hacker
        fields = ('username', 'password', 'password_repeat', 'first_name',
                  'last_name', 'email', 'repository', 'website',
                  'stackoverflow_user', 'description')

    def is_valid(self):
        valid = super(RegistrationForm, self).is_valid()
        if not valid:
            return valid

        first_password = self.cleaned_data.get('password')
        repeat_password = self.cleaned_data.get('password_repeat')

        if first_password == repeat_password:
            return True
        errors = self._errors.setdefault('password', ErrorList())
        errors.append(u'Passwords do not match')
        return False


class LoginForm(forms.Form):
    username = forms.CharField(label=_('Username'), max_length=100)
    password = forms.CharField(label=_('Password'), max_length=128,
                               widget=forms.PasswordInput(render_value=False))

    def clean(self):
        cleaned_data = super(LoginForm, self).clean()
        if self.errors:
            return cleaned_data

        user = authenticate(**cleaned_data)
        if not user:
            raise forms.ValidationError(_('Username or password is incorrect'))
        cleaned_data['user'] = user
        return cleaned_data
// ... existing code ...
from django.utils.translation import ugettext_lazy as _
from django.forms.util import ErrorList
// ... modified code ...
class RegistrationForm(forms.ModelForm):
    password = forms.CharField(label=_('Password'), max_length=128, min_length=6,
                               widget=forms.PasswordInput(render_value=False))
    password_repeat = forms.CharField(label=_('Repeat Password'), min_length=6,
                                      max_length=128, widget=forms.PasswordInput(render_value=False))

    class Meta:
...
        model = Hacker
        fields = ('username', 'password', 'password_repeat', 'first_name',
                  'last_name', 'email', 'repository', 'website',
                  'stackoverflow_user', 'description')

    def is_valid(self):
        valid = super(RegistrationForm, self).is_valid()
        if not valid:
            return valid

        first_password = self.cleaned_data.get('password')
        repeat_password = self.cleaned_data.get('password_repeat')

        if first_password == repeat_password:
            return True
        errors = self._errors.setdefault('password', ErrorList())
        errors.append(u'Passwords do not match')
        return False
// ... rest of the code ...
6c4883d6e4e65c9d6618244d821ca44c59ca5d58
tests/test_prepare.py
tests/test_prepare.py
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))

        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')

        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))

        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),

            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)
Test that we handle None->NULL conversion for TEXT and BINARY
tests: Test that we handle None->NULL conversion for TEXT and BINARY
Python
apache-2.0
MagicStack/asyncpg,MagicStack/asyncpg
  from asyncpg import _testbase as tb


  class TestPrepare(tb.ConnectedTestCase):

      async def test_prepare_1(self):
          st = await self.con.prepare('SELECT 1 = $1 AS test')
          rec = (await st.execute(1))[0]
          self.assertTrue(rec['test'])
          self.assertEqual(len(rec), 1)
          self.assertEqual(tuple(rec), (True,))

          self.assertEqual(False, (await st.execute(10))[0][0])

      async def test_prepare_2(self):
          with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
              await self.con.prepare('SELECT a')

      async def test_prepare_3(self):
-         st = await self.con.prepare('''
-             SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
-             ELSE $1::text END''')
+         cases = [
+             ('text', ("'NULL'", 'NULL'), [
+                 'aaa',
+                 None
+             ]),

-         self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
-         self.assertEqual((await st.execute(None))[0][0], 'NULL')
+             ('decimal', ('0', 0), [
+                 123,
+                 123.5,
+                 None
+             ])
+         ]
+
+         for type, (none_name, none_val), vals in cases:
+             st = await self.con.prepare('''
+                 SELECT CASE WHEN $1::{type} IS NULL THEN {default}
+                 ELSE $1::{type} END'''.format(
+                 type=type, default=none_name))
+
+             for val in vals:
+                 with self.subTest(type=type, value=val):
+                     res = (await st.execute(val))[0][0]
+                     if val is None:
+                         self.assertEqual(res, none_val)
+                     else:
+                         self.assertEqual(res, val)
Test that we handle None->NULL conversion for TEXT and BINARY
## Code Before:
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))

        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')

        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')
## Instruction:
Test that we handle None->NULL conversion for TEXT and BINARY
## Code After:
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))

        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),

            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)
// ... existing code ...
    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),

            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)
// ... rest of the code ...
f7a8c4a293538c4cd592ba23860b873cb378f28f
pyaxiom/netcdf/dataset.py
pyaxiom/netcdf/dataset.py
from netCDF4 import Dataset


class EnhancedDataset(Dataset):
    def __init__(self, *args, **kwargs):
        super(EnhancedDataset, self).__init__(*args, **kwargs)

    def get_variables_by_attributes(self, **kwargs):
        vs = []

        has_value_flag = False
        for vname in self.variables:
            var = self.variables[vname]
            for k, v in kwargs.iteritems():
                if hasattr(var, k) and getattr(var, k) == v:
                    has_value_flag = True
                else:
                    has_value_flag = False
                    break
            if has_value_flag is True:
                vs.append(self.variables[vname])

        return vs
from netCDF4 import Dataset


class EnhancedDataset(Dataset):
    def __init__(self, *args, **kwargs):
        super(EnhancedDataset, self).__init__(*args, **kwargs)

    def get_variables_by_attributes(self, **kwargs):
        vs = []

        has_value_flag = False
        for vname in self.variables:
            var = self.variables[vname]
            for k, v in kwargs.iteritems():
                if hasattr(var, k) and getattr(var, k) == v:
                    has_value_flag = True
                else:
                    has_value_flag = False
                    break
            if has_value_flag is True:
                vs.append(self.variables[vname])

        return vs

    def close(self):
        try:
            self.sync()
            self.close()
        except RuntimeError:
            pass
Add a close method to EnhancedDataset that won't raise a RuntimeError
Add a close method to EnhancedDataset that won't raise a RuntimeError
Python
mit
axiom-data-science/pyaxiom,ocefpaf/pyaxiom,ocefpaf/pyaxiom,axiom-data-science/pyaxiom
  from netCDF4 import Dataset


  class EnhancedDataset(Dataset):
      def __init__(self, *args, **kwargs):
          super(EnhancedDataset, self).__init__(*args, **kwargs)

      def get_variables_by_attributes(self, **kwargs):
          vs = []

          has_value_flag = False
          for vname in self.variables:
              var = self.variables[vname]
              for k, v in kwargs.iteritems():
                  if hasattr(var, k) and getattr(var, k) == v:
                      has_value_flag = True
                  else:
                      has_value_flag = False
                      break
              if has_value_flag is True:
                  vs.append(self.variables[vname])

          return vs
+
+     def close(self):
+         try:
+             self.sync()
+             self.close()
+         except RuntimeError:
+             pass
Add a close method to EnhancedDataset that won't raise a RuntimeError
## Code Before: from netCDF4 import Dataset class EnhancedDataset(Dataset): def __init__(self, *args, **kwargs): super(EnhancedDataset, self).__init__(*args, **kwargs) def get_variables_by_attributes(self, **kwargs): vs = [] has_value_flag = False for vname in self.variables: var = self.variables[vname] for k, v in kwargs.iteritems(): if hasattr(var, k) and getattr(var, k) == v: has_value_flag = True else: has_value_flag = False break if has_value_flag is True: vs.append(self.variables[vname]) return vs ## Instruction: Add a close method to EnhancedDataset that won't raise a RuntimeError ## Code After: from netCDF4 import Dataset class EnhancedDataset(Dataset): def __init__(self, *args, **kwargs): super(EnhancedDataset, self).__init__(*args, **kwargs) def get_variables_by_attributes(self, **kwargs): vs = [] has_value_flag = False for vname in self.variables: var = self.variables[vname] for k, v in kwargs.iteritems(): if hasattr(var, k) and getattr(var, k) == v: has_value_flag = True else: has_value_flag = False break if has_value_flag is True: vs.append(self.variables[vname]) return vs def close(self): try: self.sync() self.close() except RuntimeError: pass
// ... existing code ... return vs def close(self): try: self.sync() self.close() except RuntimeError: pass // ... rest of the code ...
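A minimal usage sketch of the attribute-lookup helper this class provides; the filename and `standard_name` value are made up.

```python
from pyaxiom.netcdf.dataset import EnhancedDataset

nc = EnhancedDataset('buoy.nc')  # hypothetical input file
temps = nc.get_variables_by_attributes(standard_name='sea_water_temperature')
print([v.name for v in temps])
nc.close()  # per the commit, close() no longer propagates RuntimeError
```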
6bff4763f486f10e43890191244b33d5b609bfdd
flashcards/commands/sets.py
flashcards/commands/sets.py
import os import click from flashcards import sets from flashcards import storage @click.group('sets') def sets_group(): """Command related to the StudySet object """ pass @click.command('new') @click.option('--title', prompt='Title of the study set') @click.option('--desc', prompt='Description for the study set (optional)') def new(title, desc): """ Create a new study set. User supplies a title and a description. If this study set does not exist, it is created. """ study_set = sets.StudySet(title, desc) filepath = storage.create_studyset_file(study_set) # automatically select this studyset storage.link_selected_studyset(filepath) click.echo('Study set created !') @click.command('select') @click.argument('studyset') def select(studyset): studyset_path = os.path.join(storage.studyset_storage_path(), studyset) storage.link_selected_studyset(studyset_path) studyset_obj = storage.load_studyset(studyset_path).load() click.echo('Selected studyset: %s' % studyset_obj.title) click.echo('Next created cards will be automatically added ' 'to this studyset.') sets_group.add_command(new) sets_group.add_command(select)
import os import click from flashcards import sets from flashcards import storage @click.group('sets') def sets_group(): """Command related to the StudySet object """ pass @click.command('new') @click.option('--title', prompt='Title of the study set') @click.option('--desc', prompt='Description for the study set (optional)') def new(title, desc): """ Create a new study set. User supplies a title and a description. If this study set does not exist, it is created. """ study_set = sets.StudySet(title, desc) filepath = storage.create_studyset_file(study_set) # automatically select this studyset storage.link_selected_studyset(filepath) click.echo('Study set created !') @click.command('select') @click.argument('studyset') def select(studyset): """ Select a studyset. Focus on a studyset, every new added cards are going to be put directly in this studyset. """ studyset_path = os.path.join(storage.studyset_storage_path(), studyset) storage.link_selected_studyset(studyset_path) studyset_obj = storage.load_studyset(studyset_path).load() click.echo('Selected studyset: %s' % studyset_obj.title) click.echo('Next created cards will be automatically added ' 'to this studyset.') sets_group.add_command(new) sets_group.add_command(select)
Add docstring to select command.
Add docstring to select command.
Python
mit
zergov/flashcards,zergov/flashcards
import os import click from flashcards import sets from flashcards import storage @click.group('sets') def sets_group(): """Command related to the StudySet object """ pass @click.command('new') @click.option('--title', prompt='Title of the study set') @click.option('--desc', prompt='Description for the study set (optional)') def new(title, desc): """ Create a new study set. User supplies a title and a description. If this study set does not exist, it is created. """ study_set = sets.StudySet(title, desc) filepath = storage.create_studyset_file(study_set) # automatically select this studyset storage.link_selected_studyset(filepath) click.echo('Study set created !') @click.command('select') @click.argument('studyset') def select(studyset): + """ + Select a studyset. + + Focus on a studyset, every new added cards are going to be put directly in + this studyset. + """ studyset_path = os.path.join(storage.studyset_storage_path(), studyset) storage.link_selected_studyset(studyset_path) studyset_obj = storage.load_studyset(studyset_path).load() click.echo('Selected studyset: %s' % studyset_obj.title) click.echo('Next created cards will be automatically added ' 'to this studyset.') sets_group.add_command(new) sets_group.add_command(select)
Add docstring to select command.
## Code Before: import os import click from flashcards import sets from flashcards import storage @click.group('sets') def sets_group(): """Command related to the StudySet object """ pass @click.command('new') @click.option('--title', prompt='Title of the study set') @click.option('--desc', prompt='Description for the study set (optional)') def new(title, desc): """ Create a new study set. User supplies a title and a description. If this study set does not exist, it is created. """ study_set = sets.StudySet(title, desc) filepath = storage.create_studyset_file(study_set) # automatically select this studyset storage.link_selected_studyset(filepath) click.echo('Study set created !') @click.command('select') @click.argument('studyset') def select(studyset): studyset_path = os.path.join(storage.studyset_storage_path(), studyset) storage.link_selected_studyset(studyset_path) studyset_obj = storage.load_studyset(studyset_path).load() click.echo('Selected studyset: %s' % studyset_obj.title) click.echo('Next created cards will be automatically added ' 'to this studyset.') sets_group.add_command(new) sets_group.add_command(select) ## Instruction: Add docstring to select command. ## Code After: import os import click from flashcards import sets from flashcards import storage @click.group('sets') def sets_group(): """Command related to the StudySet object """ pass @click.command('new') @click.option('--title', prompt='Title of the study set') @click.option('--desc', prompt='Description for the study set (optional)') def new(title, desc): """ Create a new study set. User supplies a title and a description. If this study set does not exist, it is created. """ study_set = sets.StudySet(title, desc) filepath = storage.create_studyset_file(study_set) # automatically select this studyset storage.link_selected_studyset(filepath) click.echo('Study set created !') @click.command('select') @click.argument('studyset') def select(studyset): """ Select a studyset. Focus on a studyset, every new added cards are going to be put directly in this studyset. """ studyset_path = os.path.join(storage.studyset_storage_path(), studyset) storage.link_selected_studyset(studyset_path) studyset_obj = storage.load_studyset(studyset_path).load() click.echo('Selected studyset: %s' % studyset_obj.title) click.echo('Next created cards will be automatically added ' 'to this studyset.') sets_group.add_command(new) sets_group.add_command(select)
// ... existing code ... def select(studyset): """ Select a studyset. Focus on a studyset, every new added cards are going to be put directly in this studyset. """ studyset_path = os.path.join(storage.studyset_storage_path(), studyset) // ... rest of the code ...
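One quick way to exercise the documented command is click's test runner; `algebra.json` is a hypothetical studyset file.

```python
from click.testing import CliRunner
from flashcards.commands.sets import sets_group

result = CliRunner().invoke(sets_group, ['select', 'algebra.json'])
print(result.output)  # echoes the selected studyset's title
```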
0a4aceb87eae57188c5f61bb93d78d5cc9f1779f
lava_scheduler_app/templatetags/utils.py
lava_scheduler_app/templatetags/utils.py
from django import template from django.utils.safestring import mark_safe from lava_scheduler_app.models import TestJob register = template.Library() @register.filter def get_priority_select(current): select = "" val = TestJob.PRIORITY_CHOICES for priority, label in val: check = " checked" if priority == current else "" default = " [default]" if current != 50 and priority == 50 else "" select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ (label.lower(), priority, check, label, default) return mark_safe(select)
from django import template from django.utils.safestring import mark_safe from lava_scheduler_app.models import TestJob register = template.Library() @register.filter def get_priority_select(current): select = "" val = TestJob.PRIORITY_CHOICES for priority, label in val: check = " checked" if priority == current else "" default = " [default]" if current != 50 and priority == 50 else "" select += '<label class="checkbox-inline">' select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ (label.lower(), priority, check, label, default) select += '</label>' return mark_safe(select)
Use inline radio buttons for priority changes.
Use inline radio buttons for priority changes. Change-Id: Ifb9a685bca654c5139aef3ca78e800b66ce77eb9
Python
agpl-3.0
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server
from django import template from django.utils.safestring import mark_safe from lava_scheduler_app.models import TestJob register = template.Library() @register.filter def get_priority_select(current): select = "" val = TestJob.PRIORITY_CHOICES for priority, label in val: check = " checked" if priority == current else "" default = " [default]" if current != 50 and priority == 50 else "" + select += '<label class="checkbox-inline">' select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ (label.lower(), priority, check, label, default) + select += '</label>' return mark_safe(select)
Use inline radio buttons for priority changes.
## Code Before: from django import template from django.utils.safestring import mark_safe from lava_scheduler_app.models import TestJob register = template.Library() @register.filter def get_priority_select(current): select = "" val = TestJob.PRIORITY_CHOICES for priority, label in val: check = " checked" if priority == current else "" default = " [default]" if current != 50 and priority == 50 else "" select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ (label.lower(), priority, check, label, default) return mark_safe(select) ## Instruction: Use inline radio buttons for priority changes. ## Code After: from django import template from django.utils.safestring import mark_safe from lava_scheduler_app.models import TestJob register = template.Library() @register.filter def get_priority_select(current): select = "" val = TestJob.PRIORITY_CHOICES for priority, label in val: check = " checked" if priority == current else "" default = " [default]" if current != 50 and priority == 50 else "" select += '<label class="checkbox-inline">' select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ (label.lower(), priority, check, label, default) select += '</label>' return mark_safe(select)
# ... existing code ... default = " [default]" if current != 50 and priority == 50 else "" select += '<label class="checkbox-inline">' select += '<input type="radio" name="priority" style="..." id="%s" value="%d"%s>%s%s</input><br/>' %\ # ... modified code ... (label.lower(), priority, check, label, default) select += '</label>' return mark_safe(select) # ... rest of the code ...
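A sketch of what the patched filter now emits, assuming a configured Django environment; 50 is the default priority referenced in the code.

```python
from lava_scheduler_app.templatetags.utils import get_priority_select

html = get_priority_select(50)
# Each radio input is now wrapped in a Bootstrap inline label.
assert html.startswith('<label class="checkbox-inline">')
```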
611c34eee4b5aa263669f1b7321b97fab9a98b5e
dask/distributed/tests/test_ipython_utils.py
dask/distributed/tests/test_ipython_utils.py
from dask.distributed import dask_client_from_ipclient def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dc = dask_client_from_ipclient(c) assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b') dc.close(close_workers=True, close_scheduler=True)
from dask.distributed import dask_client_from_ipclient import numpy as np from numpy.testing import assert_array_almost_equal import dask.array as da def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dask_client = dask_client_from_ipclient(c) # data a = np.arange(100).reshape(10, 10) d = da.from_array(a, ((5, 5), (5, 5))) # test array.mean expected = a.mean(axis=0) d1 = d.mean(axis=0) result = d1.compute(get=dask_client.get) assert_array_almost_equal(result, expected) # test ghosting d2 = da.ghost.ghost(d, depth=1, boundary='reflect') d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) result1 = d3.compute(get=dask_client.get) assert_array_almost_equal(result1, a) # close the workers dask_client.close(close_workers=True, close_scheduler=True)
Remove lambda test. Add dask array tests.
Remove lambda test. Add dask array tests.
Python
bsd-3-clause
PhE/dask,clarkfitzg/dask,jayhetee/dask,simudream/dask,mikegraham/dask,vikhyat/dask,PhE/dask,wiso/dask,jcrist/dask,esc/dask,mraspaud/dask,esc/dask,marianotepper/dask,vikhyat/dask,pombredanne/dask,simudream/dask,freeman-lab/dask,cpcloud/dask,blaze/dask,marianotepper/dask,jcrist/dask,hainm/dask,ContinuumIO/dask,blaze/dask,wiso/dask,mraspaud/dask,chrisbarber/dask,cowlicks/dask,gameduell/dask,freeman-lab/dask,jayhetee/dask,jakirkham/dask,jakirkham/dask,mrocklin/dask,mrocklin/dask,dask/dask,pombredanne/dask,hainm/dask,clarkfitzg/dask,ContinuumIO/dask,ssanderson/dask,dask/dask,ssanderson/dask
from dask.distributed import dask_client_from_ipclient + import numpy as np + from numpy.testing import assert_array_almost_equal + import dask.array as da + def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() - dc = dask_client_from_ipclient(c) + dask_client = dask_client_from_ipclient(c) - assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b') - dc.close(close_workers=True, close_scheduler=True) + # data + a = np.arange(100).reshape(10, 10) + d = da.from_array(a, ((5, 5), (5, 5))) + + # test array.mean + expected = a.mean(axis=0) + d1 = d.mean(axis=0) + result = d1.compute(get=dask_client.get) + assert_array_almost_equal(result, expected) + + # test ghosting + d2 = da.ghost.ghost(d, depth=1, boundary='reflect') + d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) + result1 = d3.compute(get=dask_client.get) + assert_array_almost_equal(result1, a) + + # close the workers + dask_client.close(close_workers=True, close_scheduler=True) +
Remove lambda test. Add dask array tests.
## Code Before: from dask.distributed import dask_client_from_ipclient def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dc = dask_client_from_ipclient(c) assert 2 == dc.get({'a': 1, 'b': (lambda x: x + 1, 'a')}, 'b') dc.close(close_workers=True, close_scheduler=True) ## Instruction: Remove lambda test. Add dask array tests. ## Code After: from dask.distributed import dask_client_from_ipclient import numpy as np from numpy.testing import assert_array_almost_equal import dask.array as da def test_dask_client_from_ipclient(): from IPython.parallel import Client c = Client() dask_client = dask_client_from_ipclient(c) # data a = np.arange(100).reshape(10, 10) d = da.from_array(a, ((5, 5), (5, 5))) # test array.mean expected = a.mean(axis=0) d1 = d.mean(axis=0) result = d1.compute(get=dask_client.get) assert_array_almost_equal(result, expected) # test ghosting d2 = da.ghost.ghost(d, depth=1, boundary='reflect') d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) result1 = d3.compute(get=dask_client.get) assert_array_almost_equal(result1, a) # close the workers dask_client.close(close_workers=True, close_scheduler=True)
... from dask.distributed import dask_client_from_ipclient import numpy as np from numpy.testing import assert_array_almost_equal import dask.array as da ... c = Client() dask_client = dask_client_from_ipclient(c) # data a = np.arange(100).reshape(10, 10) d = da.from_array(a, ((5, 5), (5, 5))) # test array.mean expected = a.mean(axis=0) d1 = d.mean(axis=0) result = d1.compute(get=dask_client.get) assert_array_almost_equal(result, expected) # test ghosting d2 = da.ghost.ghost(d, depth=1, boundary='reflect') d3 = da.ghost.trim_internal(d2, {0: 1, 1: 1}) result1 = d3.compute(get=dask_client.get) assert_array_almost_equal(result1, a) # close the workers dask_client.close(close_workers=True, close_scheduler=True) ...
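The removed assertion presumably went because lambdas cannot be pickled to remote IPython.parallel workers; a picklable callable still works, sketched here against a running `ipcluster` (an assumption).

```python
from IPython.parallel import Client
from dask.distributed import dask_client_from_ipclient

dask_client = dask_client_from_ipclient(Client())
# sum is importable, hence picklable, unlike the old lambda
print(dask_client.get({'x': 1, 'y': (sum, ['x', 'x'])}, 'y'))  # 2
dask_client.close(close_workers=True, close_scheduler=True)
```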
81246153033d38132903759cb7e33cf86c26a548
tests/test_attime.py
tests/test_attime.py
import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
Make sure HH:MM values are allowed
Make sure HH:MM values are allowed
Python
apache-2.0
michaelrice/graphite-api,alphapigger/graphite-api,Knewton/graphite-api,vladimir-smirnov-sociomantic/graphite-api,hubrick/graphite-api,GeorgeJahad/graphite-api,absalon-james/graphite-api,raintank/graphite-api,winguru/graphite-api,DaveBlooman/graphite-api,absalon-james/graphite-api,alphapigger/graphite-api,raintank/graphite-api,raintank/graphite-api,rackerlabs/graphite-api,michaelrice/graphite-api,DaveBlooman/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,vladimir-smirnov-sociomantic/graphite-api,GeorgeJahad/graphite-api,brutasse/graphite-api,tpeng/graphite-api,winguru/graphite-api,cybem/graphite-api-iow,rackerlabs/graphite-api,brutasse/graphite-api,hubrick/graphite-api,bogus-py/graphite-api,tpeng/graphite-api
import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', + '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
Make sure HH:MM values are allowed
## Code Before: import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value) ## Instruction: Make sure HH:MM values are allowed ## Code After: import datetime import time from graphite_api.render.attime import parseATTime from . import TestCase class AtTestCase(TestCase): def test_parse(self): for value in [ str(int(time.time())), '20140319', '20130319+1y', '20130319+1mon', '20130319+1w', '12:12_20130319', '3:05am_20130319', '3:05pm_20130319', 'noon20130319', 'midnight20130319', 'teatime20130319', 'yesterday', 'tomorrow', '03/19/2014', '03/19/1800', '03/19/1950', 'feb 27', 'mar 5', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun', '10:00', ]: self.assertIsInstance(parseATTime(value), datetime.datetime) for value in [ '20130319+1foo', 'mar', 'wat', ]: with self.assertRaises(Exception): parseATTime(value)
# ... existing code ... 'sun', '10:00', ]: # ... rest of the code ...
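The newly covered form in isolation; the resulting date is whatever "today" is when it runs.

```python
from graphite_api.render.attime import parseATTime

print(parseATTime('10:00'))  # e.g. 2014-03-19 10:00:00 on that day
```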
232c0a600946e2a679947fe638938e56d2fa7709
vint/ast/parsing.py
vint/ast/parsing.py
import extlib.vimlparser class Parser(object): def __init__(self, plugins=None): """ Initialize Parser with the specified plugins. The plugins can add attributes to the AST. """ self.plugins = plugins or [] def parse(self, string): """ Parse vim script string and return the AST. """ lines = string.split('\n') reader = extlib.vimlparser.StringReader(lines) parser = extlib.vimlparser.VimLParser() ast = parser.parse(reader) for plugin in self.plugins: plugin.process(ast) return ast def parse_file(self, file_path): """ Parse vim script file and return the AST. """ with file_path.open() as f: return self.parse(f.read())
import extlib.vimlparser class Parser(object): def __init__(self, plugins=None): """ Initialize Parser with the specified plugins. The plugins can add attributes to the AST. """ self.plugins = plugins or [] def parse(self, string): """ Parse vim script string and return the AST. """ lines = string.split('\n') reader = extlib.vimlparser.StringReader(lines) parser = extlib.vimlparser.VimLParser() ast = parser.parse(reader) # TOPLEVEL does not have a pos, but we need pos for all nodes ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1} for plugin in self.plugins: plugin.process(ast) return ast def parse_file(self, file_path): """ Parse vim script file and return the AST. """ with file_path.open() as f: return self.parse(f.read())
Add TOPLEVEL pos to unify node pos interface
Add TOPLEVEL pos to unify node pos interface
Python
mit
RianFuro/vint,Kuniwak/vint,RianFuro/vint,Kuniwak/vint
import extlib.vimlparser class Parser(object): def __init__(self, plugins=None): """ Initialize Parser with the specified plugins. The plugins can add attributes to the AST. """ self.plugins = plugins or [] def parse(self, string): """ Parse vim script string and return the AST. """ lines = string.split('\n') reader = extlib.vimlparser.StringReader(lines) parser = extlib.vimlparser.VimLParser() ast = parser.parse(reader) + # TOPLEVEL does not have a pos, but we need pos for all nodes + ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1} + for plugin in self.plugins: plugin.process(ast) return ast def parse_file(self, file_path): """ Parse vim script file and return the AST. """ with file_path.open() as f: return self.parse(f.read())
Add TOPLEVEL pos to unify node pos interface
## Code Before: import extlib.vimlparser class Parser(object): def __init__(self, plugins=None): """ Initialize Parser with the specified plugins. The plugins can add attributes to the AST. """ self.plugins = plugins or [] def parse(self, string): """ Parse vim script string and return the AST. """ lines = string.split('\n') reader = extlib.vimlparser.StringReader(lines) parser = extlib.vimlparser.VimLParser() ast = parser.parse(reader) for plugin in self.plugins: plugin.process(ast) return ast def parse_file(self, file_path): """ Parse vim script file and return the AST. """ with file_path.open() as f: return self.parse(f.read()) ## Instruction: Add TOPLEVEL pos to unify node pos interface ## Code After: import extlib.vimlparser class Parser(object): def __init__(self, plugins=None): """ Initialize Parser with the specified plugins. The plugins can add attributes to the AST. """ self.plugins = plugins or [] def parse(self, string): """ Parse vim script string and return the AST. """ lines = string.split('\n') reader = extlib.vimlparser.StringReader(lines) parser = extlib.vimlparser.VimLParser() ast = parser.parse(reader) # TOPLEVEL does not have a pos, but we need pos for all nodes ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1} for plugin in self.plugins: plugin.process(ast) return ast def parse_file(self, file_path): """ Parse vim script file and return the AST. """ with file_path.open() as f: return self.parse(f.read())
// ... existing code ... # TOPLEVEL does not have a pos, but we need pos for all nodes ast['pos'] = {'col': 1, 'i': 0, 'lnum': 1} for plugin in self.plugins: // ... rest of the code ...
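A two-line check makes the change concrete: the root node is now positioned like any other.

```python
from vint.ast.parsing import Parser

ast = Parser().parse('let g:greeting = "hi"')  # any small vim script works
print(ast['pos'])  # {'col': 1, 'i': 0, 'lnum': 1}
```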
2ac4bca0db8609bc92c9de8b1c272b2a607f6c15
tests/resource_tests.py
tests/resource_tests.py
"""Tests for the :mod:`retdec.resource` module.""" import unittest from unittest import mock from retdec.conn import APIConnection from retdec.resource import Resource class ResourceTests(unittest.TestCase): """Tests for :class:`retdec.resource.Resource`.""" def test_id_returns_passed_id(self): r = Resource('ID', mock.Mock(spec_set=APIConnection)) self.assertEqual(r.id, 'ID') def test_wait_until_finished_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) conn_mock.send_get_request.return_value = {'finished': True} r = Resource('ID', conn_mock) r.wait_until_finished() conn_mock.send_get_request.assert_called_once_with('/ID/status')
"""Tests for the :mod:`retdec.resource` module.""" import unittest from unittest import mock from retdec.conn import APIConnection from retdec.resource import Resource class ResourceTests(unittest.TestCase): """Tests for :class:`retdec.resource.Resource`.""" def test_id_returns_passed_id(self): r = Resource('ID', mock.Mock(spec_set=APIConnection)) self.assertEqual(r.id, 'ID') class ResourceWaitUntilFinishedTests(unittest.TestCase): """Tests for :func:`retdec.resource.Resource.wait_until_finished()`.""" def test_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) conn_mock.send_get_request.return_value = {'finished': True} r = Resource('ID', conn_mock) r.wait_until_finished() conn_mock.send_get_request.assert_called_once_with('/ID/status')
Move tests for Resource.wait_until_finished() into a separate class.
Move tests for Resource.wait_until_finished() into a separate class.
Python
mit
s3rvac/retdec-python
"""Tests for the :mod:`retdec.resource` module.""" import unittest from unittest import mock from retdec.conn import APIConnection from retdec.resource import Resource class ResourceTests(unittest.TestCase): """Tests for :class:`retdec.resource.Resource`.""" def test_id_returns_passed_id(self): r = Resource('ID', mock.Mock(spec_set=APIConnection)) self.assertEqual(r.id, 'ID') + + class ResourceWaitUntilFinishedTests(unittest.TestCase): + """Tests for :func:`retdec.resource.Resource.wait_until_finished()`.""" + - def test_wait_until_finished_returns_when_resource_is_finished(self): + def test_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) conn_mock.send_get_request.return_value = {'finished': True} r = Resource('ID', conn_mock) r.wait_until_finished() conn_mock.send_get_request.assert_called_once_with('/ID/status')
Move tests for Resource.wait_until_finished() into a separate class.
## Code Before: """Tests for the :mod:`retdec.resource` module.""" import unittest from unittest import mock from retdec.conn import APIConnection from retdec.resource import Resource class ResourceTests(unittest.TestCase): """Tests for :class:`retdec.resource.Resource`.""" def test_id_returns_passed_id(self): r = Resource('ID', mock.Mock(spec_set=APIConnection)) self.assertEqual(r.id, 'ID') def test_wait_until_finished_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) conn_mock.send_get_request.return_value = {'finished': True} r = Resource('ID', conn_mock) r.wait_until_finished() conn_mock.send_get_request.assert_called_once_with('/ID/status') ## Instruction: Move tests for Resource.wait_until_finished() into a separate class. ## Code After: """Tests for the :mod:`retdec.resource` module.""" import unittest from unittest import mock from retdec.conn import APIConnection from retdec.resource import Resource class ResourceTests(unittest.TestCase): """Tests for :class:`retdec.resource.Resource`.""" def test_id_returns_passed_id(self): r = Resource('ID', mock.Mock(spec_set=APIConnection)) self.assertEqual(r.id, 'ID') class ResourceWaitUntilFinishedTests(unittest.TestCase): """Tests for :func:`retdec.resource.Resource.wait_until_finished()`.""" def test_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) conn_mock.send_get_request.return_value = {'finished': True} r = Resource('ID', conn_mock) r.wait_until_finished() conn_mock.send_get_request.assert_called_once_with('/ID/status')
... class ResourceWaitUntilFinishedTests(unittest.TestCase): """Tests for :func:`retdec.resource.Resource.wait_until_finished()`.""" def test_returns_when_resource_is_finished(self): conn_mock = mock.Mock(spec_set=APIConnection) ...
a4f09620d8939aa8141b39972fb49d82f5380875
src/build/console.py
src/build/console.py
import time import datetime start_time = 0 def start_timer(): global start_time start_time = int(round(time.time()*1000)) def log(operation=None, message=None, timestamp=True): current_time = int(round(time.time()*1000)) d = datetime.timedelta(milliseconds=current_time-start_time) m = d.seconds // 60 s = d.seconds - (m * 60) ms = d.microseconds//10000 timestamp = "{:02}:{:02}.{:02}".format(m, s, ms) if operation: print("{} {:^15s} {}".format(timestamp, operation, message)) else: print("{} {}".format(timestamp, message))
import time import datetime start_time = 0 def start_timer(): global start_time start_time = int(round(time.time()*1000)) def log(operation=None, message=None, timestamp=True): current_time = int(round(time.time() * 1000)) d = datetime.timedelta(milliseconds=current_time-start_time) m = d.seconds // 60 s = d.seconds - (m * 60) ms = d.microseconds // 10000 timestamp = "{:2}:{:02}.{:02}".format(m, s, ms) if operation: print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message)) else: print("\033[34m{}\033[0m {}".format(timestamp, message))
Add colored time in output
Add colored time in output
Python
mpl-2.0
seleznev/firefox-complete-theme-build-system
import time import datetime start_time = 0 def start_timer(): global start_time start_time = int(round(time.time()*1000)) def log(operation=None, message=None, timestamp=True): - current_time = int(round(time.time()*1000)) + current_time = int(round(time.time() * 1000)) d = datetime.timedelta(milliseconds=current_time-start_time) m = d.seconds // 60 s = d.seconds - (m * 60) - ms = d.microseconds//10000 + ms = d.microseconds // 10000 - timestamp = "{:02}:{:02}.{:02}".format(m, s, ms) + timestamp = "{:2}:{:02}.{:02}".format(m, s, ms) if operation: - print("{} {:^15s} {}".format(timestamp, operation, message)) + print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message)) else: - print("{} {}".format(timestamp, message)) + print("\033[34m{}\033[0m {}".format(timestamp, message))
Add colored time in output
## Code Before: import time import datetime start_time = 0 def start_timer(): global start_time start_time = int(round(time.time()*1000)) def log(operation=None, message=None, timestamp=True): current_time = int(round(time.time()*1000)) d = datetime.timedelta(milliseconds=current_time-start_time) m = d.seconds // 60 s = d.seconds - (m * 60) ms = d.microseconds//10000 timestamp = "{:02}:{:02}.{:02}".format(m, s, ms) if operation: print("{} {:^15s} {}".format(timestamp, operation, message)) else: print("{} {}".format(timestamp, message)) ## Instruction: Add colored time in output ## Code After: import time import datetime start_time = 0 def start_timer(): global start_time start_time = int(round(time.time()*1000)) def log(operation=None, message=None, timestamp=True): current_time = int(round(time.time() * 1000)) d = datetime.timedelta(milliseconds=current_time-start_time) m = d.seconds // 60 s = d.seconds - (m * 60) ms = d.microseconds // 10000 timestamp = "{:2}:{:02}.{:02}".format(m, s, ms) if operation: print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message)) else: print("\033[34m{}\033[0m {}".format(timestamp, message))
# ... existing code ... def log(operation=None, message=None, timestamp=True): current_time = int(round(time.time() * 1000)) d = datetime.timedelta(milliseconds=current_time-start_time) # ... modified code ... s = d.seconds - (m * 60) ms = d.microseconds // 10000 timestamp = "{:2}:{:02}.{:02}".format(m, s, ms) if operation: print("\033[34m{}\033[0m {:^15s} {}".format(timestamp, operation, message)) else: print("\033[34m{}\033[0m {}".format(timestamp, message)) # ... rest of the code ...
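Running the logger directly shows the colored output; importing it as `console` assumes `src/build` is on `sys.path`, and the operation/message strings are invented.

```python
import console  # assumes src/build is on sys.path

console.start_timer()
console.log(operation='COMPILE', message='theme/browser.css')
console.log(message='done')  # timestamp prints wrapped in \033[34m ... \033[0m
```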
bf6d4c4622b9a0161fad3b03422747fb16faf5de
setup.py
setup.py
from distutils.core import setup setup( name='BitstampClient', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='[email protected]', description='Bitstamp API python implementation', requires=['requests'] )
from distutils.core import setup setup( name='bitstamp-python-client', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='[email protected]', description='Bitstamp API python implementation', requires=['requests'] )
Rename because of clash with original package.
Rename because of clash with original package.
Python
mit
nederhoed/bitstamp-python-client
from distutils.core import setup setup( - name='BitstampClient', + name='bitstamp-python-client', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='[email protected]', description='Bitstamp API python implementation', requires=['requests'] )
Rename because of clash with original package.
## Code Before: from distutils.core import setup setup( name='BitstampClient', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='[email protected]', description='Bitstamp API python implementation', requires=['requests'] ) ## Instruction: Rename because of clash with original package. ## Code After: from distutils.core import setup setup( name='bitstamp-python-client', version='0.1', packages=['bitstamp'], url='', license='MIT', author='Kamil Madac', author_email='[email protected]', description='Bitstamp API python implementation', requires=['requests'] )
// ... existing code ... setup( name='bitstamp-python-client', version='0.1', // ... rest of the code ...
0cb0fee339883adeb93f787b5cc19e5293463c06
skimage/_shared/utils.py
skimage/_shared/utils.py
import warnings import functools __all__ = ['deprecated'] class deprecated(object): '''Decorator to mark deprecated functions with warning. Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>. Parameters ---------- alt_func : str If given, tell user what function to use instead. behavior : {'warn', 'raise'} Behavior during call to deprecated function: 'warn' = warn user that function is deprecated; 'raise' = raise error. ''' def __init__(self, alt_func=None, behavior='warn'): self.alt_func = alt_func self.behavior = behavior def __call__(self, func): msg = 'Call to deprecated function `%s`.' % func.__name__ if self.alt_func is not None: msg += ' Use `%s` instead.' % self.alt_func @functools.wraps(func) def wrapped(*args, **kwargs): if self.behavior == 'warn': warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.func_code.co_filename, lineno=func.func_code.co_firstlineno + 1) elif self.behavior == 'raise': raise DeprecationWarning(msg) return func(*args, **kwargs) # modify doc string to display deprecation warning doc = 'Deprecated function.' if self.alt_func is not None: doc += ' Use `%s` instead.' % self.alt_func if wrapped.__doc__ is None: wrapped.__doc__ = doc else: wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__ return wrapped
import warnings import functools __all__ = ['deprecated'] class deprecated(object): '''Decorator to mark deprecated functions with warning. Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>. Parameters ---------- alt_func : str If given, tell user what function to use instead. behavior : {'warn', 'raise'} Behavior during call to deprecated function: 'warn' = warn user that function is deprecated; 'raise' = raise error. ''' def __init__(self, alt_func=None, behavior='warn'): self.alt_func = alt_func self.behavior = behavior def __call__(self, func): alt_msg = '' if self.alt_func is not None: alt_msg = ' Use `%s` instead.' % self.alt_func msg = 'Call to deprecated function `%s`.' % func.__name__ msg += alt_msg @functools.wraps(func) def wrapped(*args, **kwargs): if self.behavior == 'warn': warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.func_code.co_filename, lineno=func.func_code.co_firstlineno + 1) elif self.behavior == 'raise': raise DeprecationWarning(msg) return func(*args, **kwargs) # modify doc string to display deprecation warning doc = 'Deprecated function.' + alt_msg if wrapped.__doc__ is None: wrapped.__doc__ = doc else: wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__ return wrapped
Remove duplicate code for alternative function
Remove duplicate code for alternative function
Python
bsd-3-clause
michaelpacer/scikit-image,Midafi/scikit-image,jwiggins/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,Hiyorimi/scikit-image,paalge/scikit-image,SamHames/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,WarrenWeckesser/scikits-image,ajaybhat/scikit-image,oew1v07/scikit-image,rjeli/scikit-image,Britefury/scikit-image,SamHames/scikit-image,emon10005/scikit-image,chintak/scikit-image,bennlich/scikit-image,robintw/scikit-image,SamHames/scikit-image,Britefury/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,dpshelio/scikit-image,vighneshbirodkar/scikit-image,ajaybhat/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,rjeli/scikit-image,chintak/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,michaelaye/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,ofgulban/scikit-image,bsipocz/scikit-image,chriscrosscutler/scikit-image,juliusbierk/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,SamHames/scikit-image,newville/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,bennlich/scikit-image,oew1v07/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,newville/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,keflavich/scikit-image,jwiggins/scikit-image,robintw/scikit-image,pratapvardhan/scikit-image,almarklein/scikit-image,paalge/scikit-image,almarklein/scikit-image,ClinicalGraphics/scikit-image,WarrenWeckesser/scikits-image,dpshelio/scikit-image,chintak/scikit-image
import warnings import functools __all__ = ['deprecated'] class deprecated(object): '''Decorator to mark deprecated functions with warning. Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>. Parameters ---------- alt_func : str If given, tell user what function to use instead. behavior : {'warn', 'raise'} Behavior during call to deprecated function: 'warn' = warn user that function is deprecated; 'raise' = raise error. ''' def __init__(self, alt_func=None, behavior='warn'): self.alt_func = alt_func self.behavior = behavior def __call__(self, func): + alt_msg = '' + if self.alt_func is not None: + alt_msg = ' Use `%s` instead.' % self.alt_func + msg = 'Call to deprecated function `%s`.' % func.__name__ + msg += alt_msg - if self.alt_func is not None: - msg += ' Use `%s` instead.' % self.alt_func @functools.wraps(func) def wrapped(*args, **kwargs): if self.behavior == 'warn': warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.func_code.co_filename, lineno=func.func_code.co_firstlineno + 1) elif self.behavior == 'raise': raise DeprecationWarning(msg) return func(*args, **kwargs) # modify doc string to display deprecation warning - doc = 'Deprecated function.' + doc = 'Deprecated function.' + alt_msg - if self.alt_func is not None: - doc += ' Use `%s` instead.' % self.alt_func - if wrapped.__doc__ is None: wrapped.__doc__ = doc else: wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__ return wrapped
Remove duplicate code for alternative function
## Code Before: import warnings import functools __all__ = ['deprecated'] class deprecated(object): '''Decorator to mark deprecated functions with warning. Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>. Parameters ---------- alt_func : str If given, tell user what function to use instead. behavior : {'warn', 'raise'} Behavior during call to deprecated function: 'warn' = warn user that function is deprecated; 'raise' = raise error. ''' def __init__(self, alt_func=None, behavior='warn'): self.alt_func = alt_func self.behavior = behavior def __call__(self, func): msg = 'Call to deprecated function `%s`.' % func.__name__ if self.alt_func is not None: msg += ' Use `%s` instead.' % self.alt_func @functools.wraps(func) def wrapped(*args, **kwargs): if self.behavior == 'warn': warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.func_code.co_filename, lineno=func.func_code.co_firstlineno + 1) elif self.behavior == 'raise': raise DeprecationWarning(msg) return func(*args, **kwargs) # modify doc string to display deprecation warning doc = 'Deprecated function.' if self.alt_func is not None: doc += ' Use `%s` instead.' % self.alt_func if wrapped.__doc__ is None: wrapped.__doc__ = doc else: wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__ return wrapped ## Instruction: Remove duplicate code for alternative function ## Code After: import warnings import functools __all__ = ['deprecated'] class deprecated(object): '''Decorator to mark deprecated functions with warning. Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>. Parameters ---------- alt_func : str If given, tell user what function to use instead. behavior : {'warn', 'raise'} Behavior during call to deprecated function: 'warn' = warn user that function is deprecated; 'raise' = raise error. ''' def __init__(self, alt_func=None, behavior='warn'): self.alt_func = alt_func self.behavior = behavior def __call__(self, func): alt_msg = '' if self.alt_func is not None: alt_msg = ' Use `%s` instead.' % self.alt_func msg = 'Call to deprecated function `%s`.' % func.__name__ msg += alt_msg @functools.wraps(func) def wrapped(*args, **kwargs): if self.behavior == 'warn': warnings.warn_explicit(msg, category=DeprecationWarning, filename=func.func_code.co_filename, lineno=func.func_code.co_firstlineno + 1) elif self.behavior == 'raise': raise DeprecationWarning(msg) return func(*args, **kwargs) # modify doc string to display deprecation warning doc = 'Deprecated function.' + alt_msg if wrapped.__doc__ is None: wrapped.__doc__ = doc else: wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__ return wrapped
// ... existing code ... alt_msg = '' if self.alt_func is not None: alt_msg = ' Use `%s` instead.' % self.alt_func msg = 'Call to deprecated function `%s`.' % func.__name__ msg += alt_msg // ... modified code ... # modify doc string to display deprecation warning doc = 'Deprecated function.' + alt_msg if wrapped.__doc__ is None: // ... rest of the code ...
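The refactor is behavior-preserving, which a small decorated function shows; `old_fn`/`new_fn` are made-up names, and the surrounding `func.func_code` usage dates this code to Python 2.

```python
from skimage._shared.utils import deprecated

@deprecated(alt_func='new_fn')  # hypothetical replacement
def old_fn(x):
    """Double x."""
    return 2 * x

print(old_fn.__doc__.splitlines()[0])
# Deprecated function. Use `new_fn` instead.
```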
df2fe66f64f79127374d2f183cb76966f77761ee
signac/common/errors.py
signac/common/errors.py
class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): pass
class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format(self.args[0]) else: return "Failed to authenticate with host."
Improve error message for authentication issues.
Improve error message for authentication issues.
Python
bsd-3-clause
csadorf/signac,csadorf/signac
class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): - pass + def __str__(self): + if len(self.args) > 0: + return "Failed to authenticate with host '{}'.".format(self.args[0]) + else: + return "Failed to authenticate with host." +
Improve error message for authentication issues.
## Code Before: class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): pass ## Instruction: Improve error message for authentication issues. ## Code After: class Error(Exception): pass class ConfigError(Error, RuntimeError): pass class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format(self.args[0]) else: return "Failed to authenticate with host."
... class AuthenticationError(Error, RuntimeError): def __str__(self): if len(self.args) > 0: return "Failed to authenticate with host '{}'.".format(self.args[0]) else: return "Failed to authenticate with host." ...
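The new message in action; the hostname is invented.

```python
from signac.common.errors import AuthenticationError

print(str(AuthenticationError('db.example.com')))
# Failed to authenticate with host 'db.example.com'.
print(str(AuthenticationError()))
# Failed to authenticate with host.
```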
a2d4381de5dc50110a0e57d7e56a668edbee2ccf
bot/utils.py
bot/utils.py
from discord import Embed def build_embed(ctx, desc: str, title: str = ''): name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name embed = Embed( title=title, description=desc ) embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url) return embed
from enum import IntEnum from discord import Embed class OpStatus(IntEnum): SUCCESS = 0x2ECC71, FAILURE = 0xc0392B, WARNING = 0xf39C12 def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed: name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name embed = Embed( title=title, description=desc, color=status.value if status is not None else OpStatus.WARNING ) embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url) return embed
Add support for colored output in embeds
Add support for colored output in embeds
Python
apache-2.0
HellPie/discord-reply-bot
+ from enum import IntEnum from discord import Embed - def build_embed(ctx, desc: str, title: str = ''): + class OpStatus(IntEnum): + SUCCESS = 0x2ECC71, + FAILURE = 0xc0392B, + WARNING = 0xf39C12 + + + def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed: name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name embed = Embed( title=title, - description=desc + description=desc, + color=status.value if status is not None else OpStatus.WARNING ) embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url) return embed
Add support for colored output in embeds
## Code Before: from discord import Embed def build_embed(ctx, desc: str, title: str = ''): name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name embed = Embed( title=title, description=desc ) embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url) return embed ## Instruction: Add support for colored output in embeds ## Code After: from enum import IntEnum from discord import Embed class OpStatus(IntEnum): SUCCESS = 0x2ECC71, FAILURE = 0xc0392B, WARNING = 0xf39C12 def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed: name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name embed = Embed( title=title, description=desc, color=status.value if status is not None else OpStatus.WARNING ) embed.set_author(name=name, icon_url=ctx.bot.user.avatar_url) return embed
... from enum import IntEnum from discord import Embed ... class OpStatus(IntEnum): SUCCESS = 0x2ECC71, FAILURE = 0xc0392B, WARNING = 0xf39C12 def build_embed(ctx, desc: str, title: str = '', status: OpStatus = OpStatus.SUCCESS) -> Embed: name = ctx.message.server.me.nick if ctx.message.server.me.nick is not None else ctx.bot.user.name ... title=title, description=desc, color=status.value if status is not None else OpStatus.WARNING ) ...
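The enum can be inspected without a live Discord `ctx`; note the trailing commas make two members 1-tuples, which `IntEnum` still resolves to plain ints.

```python
from bot.utils import OpStatus  # assumes the repo root is on sys.path

for status in OpStatus:
    print(status.name, hex(status))  # e.g. SUCCESS 0x2ecc71
```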
0cff7d25a9d0fc76c723e058652551bb2c43d1fc
benchmarks/test_benchmark.py
benchmarks/test_benchmark.py
import re import urllib import random import unittest from funkload.FunkLoadTestCase import FunkLoadTestCase class Benchmark(FunkLoadTestCase): """This test uses a configuration file Benchmark.conf.""" def setUp(self): self.server_url = self.conf_get('main', 'url') def test_simple(self): server_url = self.server_url if not re.match('https?://', server_url): raise Exception("The `server_url` setting doesn't have a scheme.") username = self.conf_get('test_benchmark', 'username', None) password = self.conf_get('test_benchmark', 'password', None) if username and password: self.post(self.server_url + "/api/user/login", params=[['username', username], ['password', password]], description="Login as %s" % username) nb_times = self.conf_getInt('test_benchmark', 'nb_times') names = self.conf_get('test_benchmark', 'page_names').split(';') for i in range(nb_times): r = random.randint(0, len(names) - 1) url = server_url + '/api/read/' + urllib.quote(names[r]) self.get(url, description='Getting %s' % names[r]) if __name__ in ('main', '__main__'): unittest.main()
import re import urllib.parse import random import unittest from funkload.FunkLoadTestCase import FunkLoadTestCase class Benchmark(FunkLoadTestCase): """This test uses a configuration file Benchmark.conf.""" def setUp(self): self.server_url = self.conf_get('main', 'url') def test_simple(self): server_url = self.server_url if not re.match('https?://', server_url): raise Exception("The `server_url` setting doesn't have a scheme.") username = self.conf_get('test_benchmark', 'username', None) password = self.conf_get('test_benchmark', 'password', None) if username and password: self.post(self.server_url + "/api/user/login", params=[['username', username], ['password', password]], description="Login as %s" % username) nb_times = self.conf_getInt('test_benchmark', 'nb_times') names = self.conf_get('test_benchmark', 'page_names').split(';') for i in range(nb_times): r = random.randint(0, len(names) - 1) url = server_url + '/api/read/' + urllib.parse.quote(names[r]) self.get(url, description='Getting %s' % names[r]) if __name__ in ('main', '__main__'): unittest.main()
Update benchmarks to Python 3.
Update benchmarks to Python 3.
Python
apache-2.0
ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked
import re - import urllib + import urllib.parse import random import unittest from funkload.FunkLoadTestCase import FunkLoadTestCase class Benchmark(FunkLoadTestCase): """This test uses a configuration file Benchmark.conf.""" def setUp(self): self.server_url = self.conf_get('main', 'url') def test_simple(self): server_url = self.server_url if not re.match('https?://', server_url): raise Exception("The `server_url` setting doesn't have a scheme.") username = self.conf_get('test_benchmark', 'username', None) password = self.conf_get('test_benchmark', 'password', None) if username and password: self.post(self.server_url + "/api/user/login", params=[['username', username], ['password', password]], description="Login as %s" % username) nb_times = self.conf_getInt('test_benchmark', 'nb_times') names = self.conf_get('test_benchmark', 'page_names').split(';') for i in range(nb_times): r = random.randint(0, len(names) - 1) - url = server_url + '/api/read/' + urllib.quote(names[r]) + url = server_url + '/api/read/' + urllib.parse.quote(names[r]) self.get(url, description='Getting %s' % names[r]) if __name__ in ('main', '__main__'): unittest.main()
Update benchmarks to Python 3.
## Code Before:
import re
import urllib
import random
import unittest
from funkload.FunkLoadTestCase import FunkLoadTestCase


class Benchmark(FunkLoadTestCase):
    """This test uses a configuration file Benchmark.conf."""

    def setUp(self):
        self.server_url = self.conf_get('main', 'url')

    def test_simple(self):
        server_url = self.server_url
        if not re.match('https?://', server_url):
            raise Exception("The `server_url` setting doesn't have a scheme.")

        username = self.conf_get('test_benchmark', 'username', None)
        password = self.conf_get('test_benchmark', 'password', None)
        if username and password:
            self.post(self.server_url + "/api/user/login",
                      params=[['username', username],
                              ['password', password]],
                      description="Login as %s" % username)

        nb_times = self.conf_getInt('test_benchmark', 'nb_times')
        names = self.conf_get('test_benchmark', 'page_names').split(';')
        for i in range(nb_times):
            r = random.randint(0, len(names) - 1)
            url = server_url + '/api/read/' + urllib.quote(names[r])
            self.get(url, description='Getting %s' % names[r])


if __name__ in ('main', '__main__'):
    unittest.main()

## Instruction:
Update benchmarks to Python 3.

## Code After:
import re
import urllib.parse
import random
import unittest
from funkload.FunkLoadTestCase import FunkLoadTestCase


class Benchmark(FunkLoadTestCase):
    """This test uses a configuration file Benchmark.conf."""

    def setUp(self):
        self.server_url = self.conf_get('main', 'url')

    def test_simple(self):
        server_url = self.server_url
        if not re.match('https?://', server_url):
            raise Exception("The `server_url` setting doesn't have a scheme.")

        username = self.conf_get('test_benchmark', 'username', None)
        password = self.conf_get('test_benchmark', 'password', None)
        if username and password:
            self.post(self.server_url + "/api/user/login",
                      params=[['username', username],
                              ['password', password]],
                      description="Login as %s" % username)

        nb_times = self.conf_getInt('test_benchmark', 'nb_times')
        names = self.conf_get('test_benchmark', 'page_names').split(';')
        for i in range(nb_times):
            r = random.randint(0, len(names) - 1)
            url = server_url + '/api/read/' + urllib.parse.quote(names[r])
            self.get(url, description='Getting %s' % names[r])


if __name__ in ('main', '__main__'):
    unittest.main()
// ... existing code ... import re import urllib.parse import random // ... modified code ... r = random.randint(0, len(names) - 1) url = server_url + '/api/read/' + urllib.parse.quote(names[r]) self.get(url, description='Getting %s' % names[r]) // ... rest of the code ...
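The core of the port is the `urllib` to `urllib.parse` move, which is stdlib-only to check:

```python
import urllib.parse

print(urllib.parse.quote('Front Page'))  # Front%20Page
```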
f6c2f222db0f529d3f5906d5de7a7541e835ea77
litecord/api/guilds.py
litecord/api/guilds.py
''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass
''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass async def h_guilds(self, request): ''' GuildsEndpoint.h_guilds Handle `GET /guilds/{guild_id}` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] guild = self.server.guild_man.get_guild(guild_id) if guild is None: return _err('404: Not Found') return _json(guild.as_json) async def h_get_guild_channels(self, request): ''' GuildsEndpoint.h_get_guild_channels `GET /guilds/{guild_id}/channels` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] return _json('Not Implemented')
Add some dummy routes in GuildsEndpoint
Add some dummy routes in GuildsEndpoint
Python
mit
nullpixel/litecord,nullpixel/litecord
''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass + async def h_guilds(self, request): + ''' + GuildsEndpoint.h_guilds + + Handle `GET /guilds/{guild_id}` + ''' + _error = await self.server.check_request(request) + _error_json = json.loads(_error.text) + if _error_json['code'] == 0: + return _error + + guild_id = request.match_info['guild_id'] + + guild = self.server.guild_man.get_guild(guild_id) + if guild is None: + return _err('404: Not Found') + + return _json(guild.as_json) + + async def h_get_guild_channels(self, request): + ''' + GuildsEndpoint.h_get_guild_channels + + `GET /guilds/{guild_id}/channels` + ''' + _error = await self.server.check_request(request) + _error_json = json.loads(_error.text) + if _error_json['code'] == 0: + return _error + + guild_id = request.match_info['guild_id'] + + return _json('Not Implemented') +
Add some dummy routes in GuildsEndpoint
## Code Before: ''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass ## Instruction: Add some dummy routes in GuildsEndpoint ## Code After: ''' guilds.py - All handlers under /guilds/* ''' import json import logging from ..utils import _err, _json, strip_user_data log = logging.getLogger(__name__) class GuildsEndpoint: def __init__(self, server): self.server = server async def h_post_guilds(self, request): pass async def h_guilds(self, request): ''' GuildsEndpoint.h_guilds Handle `GET /guilds/{guild_id}` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] guild = self.server.guild_man.get_guild(guild_id) if guild is None: return _err('404: Not Found') return _json(guild.as_json) async def h_get_guild_channels(self, request): ''' GuildsEndpoint.h_get_guild_channels `GET /guilds/{guild_id}/channels` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] return _json('Not Implemented')
# ... existing code ... pass async def h_guilds(self, request): ''' GuildsEndpoint.h_guilds Handle `GET /guilds/{guild_id}` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] guild = self.server.guild_man.get_guild(guild_id) if guild is None: return _err('404: Not Found') return _json(guild.as_json) async def h_get_guild_channels(self, request): ''' GuildsEndpoint.h_get_guild_channels `GET /guilds/{guild_id}/channels` ''' _error = await self.server.check_request(request) _error_json = json.loads(_error.text) if _error_json['code'] == 0: return _error guild_id = request.match_info['guild_id'] return _json('Not Implemented') # ... rest of the code ...
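The new handlers read request.match_info['guild_id'], which is aiohttp's routing API; assuming that framework (an inference from the API surface, not something the record states), a handler is wired to a route template roughly like this. The route path and response body are illustrative, not taken from the litecord repository:

from aiohttp import web

async def h_guilds(request):
    # The '{guild_id}' segment in the route template is what populates
    # request.match_info inside the handler.
    guild_id = request.match_info['guild_id']
    return web.json_response({'id': guild_id})

app = web.Application()
app.router.add_get('/guilds/{guild_id}', h_guilds)

if __name__ == '__main__':
    web.run_app(app)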
504c50bd5cf229b5686f398304ab26e707d0cad8
partner_firstname/exceptions.py
partner_firstname/exceptions.py
from openerp import _, exceptions class PartnerNameError(exceptions.ValidationError): def __init__(self, record, value=None): self.record = record self._value = value self.name = _("Error(s) with partner %d's name.") % record.id @property def value(self): raise NotImplementedError() class EmptyNames(PartnerNameError): @property def value(self): return _("No name is set.")
from openerp import _, exceptions class EmptyNames(exceptions.ValidationError): def __init__(self, record, value=_("No name is set.")): self.record = record self._value = value self.name = _("Error(s) with partner %d's name.") % record.id
Remove subclassing of exception, since there is only one.
Remove subclassing of exception, since there is only one.
Python
agpl-3.0
microcom/partner-contact,brain-tec/partner-contact,brain-tec/partner-contact,microcom/partner-contact
from openerp import _, exceptions - class PartnerNameError(exceptions.ValidationError): + class EmptyNames(exceptions.ValidationError): - def __init__(self, record, value=None): + def __init__(self, record, value=_("No name is set.")): self.record = record self._value = value self.name = _("Error(s) with partner %d's name.") % record.id - @property - def value(self): - raise NotImplementedError() - - - class EmptyNames(PartnerNameError): - @property - def value(self): - return _("No name is set.") -
Remove subclassing of exception, since there is only one.
## Code Before: from openerp import _, exceptions class PartnerNameError(exceptions.ValidationError): def __init__(self, record, value=None): self.record = record self._value = value self.name = _("Error(s) with partner %d's name.") % record.id @property def value(self): raise NotImplementedError() class EmptyNames(PartnerNameError): @property def value(self): return _("No name is set.") ## Instruction: Remove subclassing of exception, since there is only one. ## Code After: from openerp import _, exceptions class EmptyNames(exceptions.ValidationError): def __init__(self, record, value=_("No name is set.")): self.record = record self._value = value self.name = _("Error(s) with partner %d's name.") % record.id
// ... existing code ... class EmptyNames(exceptions.ValidationError): def __init__(self, record, value=_("No name is set.")): self.record = record // ... modified code ... self.name = _("Error(s) with partner %d's name.") % record.id // ... rest of the code ...
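With the hierarchy collapsed, callers raise and catch EmptyNames directly, and it is still catchable as ValidationError. A minimal sketch of that calling pattern: ValidationError and the _ translation helper are stubbed so the snippet runs outside Odoo, and FakePartner is invented. Note the Python subtlety the new signature leans on: the value=_("No name is set.") default is evaluated once, at class-definition time.

class ValidationError(Exception):  # stub for openerp.exceptions.ValidationError
    pass

def _(text):  # stub for openerp's translation helper
    return text

class EmptyNames(ValidationError):
    def __init__(self, record, value=_("No name is set.")):
        self.record = record
        self._value = value
        self.name = _("Error(s) with partner %d's name.") % record.id

class FakePartner:
    id = 7

try:
    raise EmptyNames(FakePartner())
except ValidationError as e:  # the base class still catches it
    print(e.name, e._value)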
2daee974533d1510a17280cddb5a4dfc147338fa
tests/level/test_map.py
tests/level/test_map.py
import unittest from hunting.level.map import LevelTile, LevelMap class TestPathfinding(unittest.TestCase): def test_basic_diagonal(self): level_map = LevelMap() level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4)) def test_paths_around_wall(self): level_map = LevelMap() level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) for x in range(1, 5): level_map[x][1].blocks = True self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)], level_map.a_star_path(4, 0, 4, 2))
import unittest from hunting.level.map import LevelTile, LevelMap class TestPathfinding(unittest.TestCase): def test_basic_diagonal(self): level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4)) def test_paths_around_wall(self): level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) for x in range(1, 5): level_map[x][1].blocks = True self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)], level_map.a_star_path(4, 0, 4, 2)) def tests_force_pathable_endpoint_parameter(self): level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]]) self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True)) self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
Add test for force_pathable_endpoint pathfind param
Add test for force_pathable_endpoint pathfind param This parameter is intended to allow pathing to adjacent squares of an unpassable square. This is necessary because if you want to pathfind to a monster which blocks a square, you don't want to actually go *onto* the square, you just want to go next to it, presumably so you can hit it.
Python
mit
MoyTW/RL_Arena_Experiment
import unittest from hunting.level.map import LevelTile, LevelMap class TestPathfinding(unittest.TestCase): def test_basic_diagonal(self): - level_map = LevelMap() - level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) + level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4)) def test_paths_around_wall(self): - level_map = LevelMap() - level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) + level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) for x in range(1, 5): level_map[x][1].blocks = True self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)], level_map.a_star_path(4, 0, 4, 2)) + def tests_force_pathable_endpoint_parameter(self): + level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]]) + + self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True)) + self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False)) +
Add test for force_pathable_endpoint pathfind param
## Code Before: import unittest from hunting.level.map import LevelTile, LevelMap class TestPathfinding(unittest.TestCase): def test_basic_diagonal(self): level_map = LevelMap() level_map.set_map([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4)) def test_paths_around_wall(self): level_map = LevelMap() level_map.set_map([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) for x in range(1, 5): level_map[x][1].blocks = True self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)], level_map.a_star_path(4, 0, 4, 2)) ## Instruction: Add test for force_pathable_endpoint pathfind param ## Code After: import unittest from hunting.level.map import LevelTile, LevelMap class TestPathfinding(unittest.TestCase): def test_basic_diagonal(self): level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) self.assertEqual([(1, 1), (2, 2), (3, 3), (4, 4)], level_map.a_star_path(0, 0, 4, 4)) def test_paths_around_wall(self): level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) for x in range(1, 5): level_map[x][1].blocks = True self.assertEqual([(3, 0), (2, 0), (1, 0), (0, 1), (1, 2), (2, 2), (3, 2), (4, 2)], level_map.a_star_path(4, 0, 4, 2)) def tests_force_pathable_endpoint_parameter(self): level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]]) self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True)) self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False))
... def test_basic_diagonal(self): level_map = LevelMap([[LevelTile() for _ in range(0, 5)] for _ in range(0, 5)]) ... def test_paths_around_wall(self): level_map = LevelMap([[LevelTile() for _ in range(0, 3)] for _ in range(0, 5)]) ... level_map.a_star_path(4, 0, 4, 2)) def tests_force_pathable_endpoint_parameter(self): level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]]) self.assertEqual([(1, 0)], level_map.a_star_path(0, 0, 1, 0, True)) self.assertEqual([], level_map.a_star_path(0, 0, 1, 0, False)) ...
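The commit message spells out the intent: path to the square a monster occupies without requiring that square to be walkable, so the mover can stop adjacent and attack. A usage sketch built from the test's own fixtures; LevelMap and LevelTile come from the repository, so this runs inside it, and the combat framing is illustrative:

from hunting.level.map import LevelTile, LevelMap

# A 2x1 map: the attacker stands at (0, 0), a monster blocks (1, 0).
level_map = LevelMap([[LevelTile(False, False)], [LevelTile(True, True)]])

# With force_pathable_endpoint=True the goal tile is treated as walkable
# for the search only, so a path is found and ends on the monster's
# square; the caller stops one step short of the final coordinate.
assert level_map.a_star_path(0, 0, 1, 0, True) == [(1, 0)]

# Without the flag the blocked goal is simply unreachable.
assert level_map.a_star_path(0, 0, 1, 0, False) == []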
b2803c40b2fcee7ab466c83fc95bb693a28576d0
messageboard/views.py
messageboard/views.py
from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) with open('media/img/snapshot.jpg', 'wb') as f: f.write(photo) photo_file = File(f, name='snapshot.jpg') serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File from django.core.files.temp import NamedTemporaryFile import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) img_temp = NamedTemporaryFile(delete=True) img_temp.write(photo) img_temp.flush() photo_file = File(img_temp) serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
Use temporary file and fix image save handling
Use temporary file and fix image save handling
Python
mit
DjangoBeer/message-board,DjangoBeer/message-board,fmarco/message-board,DjangoBeer/message-board,fmarco/message-board,fmarco/message-board
from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File + from django.core.files.temp import NamedTemporaryFile import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) - with open('media/img/snapshot.jpg', 'wb') as f: + img_temp = NamedTemporaryFile(delete=True) - f.write(photo) + img_temp.write(photo) - photo_file = File(f, name='snapshot.jpg') + img_temp.flush() + photo_file = File(img_temp) serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
Use temporary file and fix image save handling
## Code Before: from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) with open('media/img/snapshot.jpg', 'wb') as f: f.write(photo) photo_file = File(f, name='snapshot.jpg') serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file ) ## Instruction: Use temporary file and fix image save handling ## Code After: from django.shortcuts import render from .models import Message from .serializers import MessageSerializer from .permissions import IsOwnerOrReadOnly from rest_framework import generics, permissions from rest_framework.permissions import IsAuthenticated from rest_framework import viewsets from rest_framework.decorators import list_route from rest_framework.response import Response from django.core.files import File from django.core.files.temp import NamedTemporaryFile import base64 class MessageViewSet(viewsets.ModelViewSet): serializer_class = MessageSerializer permission_classes = (permissions.IsAuthenticated,) queryset = Message.objects.all() @list_route(methods=['get'], permission_classes=[permissions.AllowAny]) def all(self, request): messages = Message.objects.all() serializer = MessageSerializer(messages, many=True) return Response(serializer.data) def perform_create(self, serializer): photo_file = None if 'photo' in self.request.data: photo = base64.b64decode(self.request.data['photo']) img_temp = NamedTemporaryFile(delete=True) img_temp.write(photo) img_temp.flush() photo_file = File(img_temp) serializer.save( author=self.request.user, message=self.request.data['message'], image=photo_file )
... from django.core.files import File from django.core.files.temp import NamedTemporaryFile ... photo = base64.b64decode(self.request.data['photo']) img_temp = NamedTemporaryFile(delete=True) img_temp.write(photo) img_temp.flush() photo_file = File(img_temp) serializer.save( ...
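Two things appear to go wrong in the replaced code: every upload is written to the same fixed path, so concurrent requests clobber each other, and the File object is built from a handle the with block has already closed by the time Django copies it into storage. Django ships its own NamedTemporaryFile (django.core.files.temp) largely so the file can be reopened while held open on Windows, where the stdlib version cannot. A standalone sketch of the fixed pattern; the helper name is invented:

import base64

from django.core.files import File
from django.core.files.temp import NamedTemporaryFile

def decoded_upload(b64_payload):
    """Turn a base64 payload into a File backed by a private temp file."""
    tmp = NamedTemporaryFile(delete=True)  # unique path per request
    tmp.write(base64.b64decode(b64_payload))
    tmp.flush()  # the bytes must be on disk before storage reads the file
    return File(tmp)  # keep tmp open; Django copies from it on save()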
c83c63a6b1ff1cc6d6d4f71f2da3affbb167738d
tabler/tabler.py
tabler/tabler.py
from parser import TableParser class Tabler: def __init__(self, html): self._html = html self.header = [] self.body = [] self.footer = [] self.parser = TableParser(self.add_header_row, self.add_body_row, self.add_footer_row) self.parser.feed(html) def rows(self): return self.header + self.body + self.footer def add_header_row(self, cells): self.header.append(cells) def add_body_row(self, cells): self.body.append(cells) def add_footer_row(self, cells): self.footer.append(cells)
from parser import TableParser class Tabler: def __init__(self, html): self._html = html self.header = [] self.body = [] self.footer = [] self.parser = TableParser(self.add_header_row, self.add_body_row, self.add_footer_row) self.parser.feed(html) def rows(self): return self.header + self.body + self.footer def body_rows(self): return self.body def add_header_row(self, cells): self.header.append(self.Row(self, cells)) def add_body_row(self, cells): self.body.append(self.Row(self, cells)) def add_footer_row(self, cells): self.footer.append(self.Row(self, cells)) def index_of(self, index): if isinstance(index, str): if len(self.header) > 0: try: return self.header[0].index(index) except ValueError: raise ValueError(index + " is not a valid index value.") raise ValueError(index + " is not a valid index value.") return index class Row: def __init__(self, tabler, cells): self._tabler = tabler self._cells = cells def __getitem__(self, index): return self._cells[self._tabler.index_of(index)] def index(self, elt): return self._cells.index(elt)
Use a class to store row data, and allow lookup via index or header cell value.
Use a class to store row data, and allow lookup via index or header cell value.
Python
bsd-3-clause
bschmeck/tabler
from parser import TableParser class Tabler: def __init__(self, html): self._html = html self.header = [] self.body = [] self.footer = [] self.parser = TableParser(self.add_header_row, self.add_body_row, self.add_footer_row) self.parser.feed(html) def rows(self): return self.header + self.body + self.footer + def body_rows(self): + return self.body + def add_header_row(self, cells): - self.header.append(cells) + self.header.append(self.Row(self, cells)) def add_body_row(self, cells): - self.body.append(cells) + self.body.append(self.Row(self, cells)) def add_footer_row(self, cells): - self.footer.append(cells) + self.footer.append(self.Row(self, cells)) + def index_of(self, index): + if isinstance(index, str): + if len(self.header) > 0: + try: + return self.header[0].index(index) + except ValueError: + raise ValueError(index + " is not a valid index value.") + raise ValueError(index + " is not a valid index value.") + return index + + class Row: + def __init__(self, tabler, cells): + self._tabler = tabler + self._cells = cells + + def __getitem__(self, index): + return self._cells[self._tabler.index_of(index)] + + def index(self, elt): + return self._cells.index(elt) +
Use a class to store row data, and allow lookup via index or header cell value.
## Code Before: from parser import TableParser class Tabler: def __init__(self, html): self._html = html self.header = [] self.body = [] self.footer = [] self.parser = TableParser(self.add_header_row, self.add_body_row, self.add_footer_row) self.parser.feed(html) def rows(self): return self.header + self.body + self.footer def add_header_row(self, cells): self.header.append(cells) def add_body_row(self, cells): self.body.append(cells) def add_footer_row(self, cells): self.footer.append(cells) ## Instruction: Use a class to store row data, and allow lookup via index or header cell value. ## Code After: from parser import TableParser class Tabler: def __init__(self, html): self._html = html self.header = [] self.body = [] self.footer = [] self.parser = TableParser(self.add_header_row, self.add_body_row, self.add_footer_row) self.parser.feed(html) def rows(self): return self.header + self.body + self.footer def body_rows(self): return self.body def add_header_row(self, cells): self.header.append(self.Row(self, cells)) def add_body_row(self, cells): self.body.append(self.Row(self, cells)) def add_footer_row(self, cells): self.footer.append(self.Row(self, cells)) def index_of(self, index): if isinstance(index, str): if len(self.header) > 0: try: return self.header[0].index(index) except ValueError: raise ValueError(index + " is not a valid index value.") raise ValueError(index + " is not a valid index value.") return index class Row: def __init__(self, tabler, cells): self._tabler = tabler self._cells = cells def __getitem__(self, index): return self._cells[self._tabler.index_of(index)] def index(self, elt): return self._cells.index(elt)
... def body_rows(self): return self.body def add_header_row(self, cells): self.header.append(self.Row(self, cells)) ... def add_body_row(self, cells): self.body.append(self.Row(self, cells)) ... def add_footer_row(self, cells): self.footer.append(self.Row(self, cells)) def index_of(self, index): if isinstance(index, str): if len(self.header) > 0: try: return self.header[0].index(index) except ValueError: raise ValueError(index + " is not a valid index value.") raise ValueError(index + " is not a valid index value.") return index class Row: def __init__(self, tabler, cells): self._tabler = tabler self._cells = cells def __getitem__(self, index): return self._cells[self._tabler.index_of(index)] def index(self, elt): return self._cells.index(elt) ...
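The new Row delegates string lookups back to its parent, which resolves them against the first header row; integer indexes pass straight through. The pattern in miniature, self-contained (Table and Row here are simplified stand-ins for the Tabler classes above):

class Table:
    def __init__(self, header):
        self.header = header

    def index_of(self, index):
        # Strings resolve against the header row; ints pass through.
        return self.header.index(index) if isinstance(index, str) else index

class Row:
    def __init__(self, table, cells):
        self._table, self._cells = table, cells

    def __getitem__(self, index):
        return self._cells[self._table.index_of(index)]

row = Row(Table(['Name', 'Age']), ['Ada', '36'])
assert row['Age'] == row[1] == '36'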
8d9b2bdbf47b51e3ada3b5e14fcc27bcaafce4fb
dbsync/logs.py
dbsync/logs.py
import logging #: All the library loggers loggers = set() log_handler = None def get_logger(name): logger = logging.getLogger(name) logger.setLevel(logging.WARNING) loggers.add(logger) if log_handler is not None: logger.addHandler(log_handler) return logger def set_log_target(fo): "Set a stream as target for dbsync's logging." global log_handler if log_handler is None: log_handler = logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) log_handler.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) for logger in loggers: logger.addHandler(log_handler)
import logging #: All the library loggers loggers = set() log_handler = None def get_logger(name): logger = logging.getLogger(name) logger.setLevel(logging.WARNING) loggers.add(logger) if log_handler is not None: logger.addHandler(log_handler) return logger def set_log_target(fo): """ Set a stream as target for dbsync's logging. If a string is given, it will be considered to be a path to a file. """ global log_handler if log_handler is None: log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \ else logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) log_handler.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) for logger in loggers: logger.addHandler(log_handler)
Allow file paths to be given to set_log_target.
Allow file paths to be given to set_log_target.
Python
mit
bintlabs/python-sync-db
import logging #: All the library loggers loggers = set() log_handler = None def get_logger(name): logger = logging.getLogger(name) logger.setLevel(logging.WARNING) loggers.add(logger) if log_handler is not None: logger.addHandler(log_handler) return logger def set_log_target(fo): + """ - "Set a stream as target for dbsync's logging." + Set a stream as target for dbsync's logging. If a string is given, + it will be considered to be a path to a file. + """ global log_handler if log_handler is None: + log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \ - log_handler = logging.StreamHandler(fo) + else logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) log_handler.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) for logger in loggers: logger.addHandler(log_handler)
Allow file paths to be given to set_log_target.
## Code Before: import logging #: All the library loggers loggers = set() log_handler = None def get_logger(name): logger = logging.getLogger(name) logger.setLevel(logging.WARNING) loggers.add(logger) if log_handler is not None: logger.addHandler(log_handler) return logger def set_log_target(fo): "Set a stream as target for dbsync's logging." global log_handler if log_handler is None: log_handler = logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) log_handler.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) for logger in loggers: logger.addHandler(log_handler) ## Instruction: Allow file paths to be given to set_log_target. ## Code After: import logging #: All the library loggers loggers = set() log_handler = None def get_logger(name): logger = logging.getLogger(name) logger.setLevel(logging.WARNING) loggers.add(logger) if log_handler is not None: logger.addHandler(log_handler) return logger def set_log_target(fo): """ Set a stream as target for dbsync's logging. If a string is given, it will be considered to be a path to a file. """ global log_handler if log_handler is None: log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \ else logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) log_handler.setFormatter( logging.Formatter( "%(asctime)s - %(name)s - %(levelname)s - %(message)s")) for logger in loggers: logger.addHandler(log_handler)
# ... existing code ... def set_log_target(fo): """ Set a stream as target for dbsync's logging. If a string is given, it will be considered to be a path to a file. """ global log_handler # ... modified code ... if log_handler is None: log_handler = logging.FileHandler(fo) if isinstance(fo, basestring) \ else logging.StreamHandler(fo) log_handler.setLevel(logging.WARNING) # ... rest of the code ...
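Usage of the widened API, runnable inside the repository; the logger name and file path are illustrative. Note the isinstance check against basestring, which marks this as Python 2 code (basestring no longer exists in Python 3, where the check would be against str):

from dbsync.logs import get_logger, set_log_target

log = get_logger('myapp.sync')
set_log_target('/tmp/dbsync.log')  # a string now means "append to this file"
log.warning('this warning lands in /tmp/dbsync.log')
# Passing an open stream (e.g. sys.stderr) still works as before.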
e28c9da712574618eb28b6ff82631462fee67c16
changes/utils/times.py
changes/utils/times.py
def duration(value): ONE_SECOND = 1000 ONE_MINUTE = ONE_SECOND * 60 if not value: return '0 s' if value < 3 * ONE_SECOND: return '%d ms' % (value,) elif value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) else: return '%d m' % (value / ONE_MINUTE,)
def duration(value): ONE_SECOND = 1000 ONE_MINUTE = ONE_SECOND * 60 if not value: return '0 s' abs_value = abs(value) if abs_value < 3 * ONE_SECOND: return '%d ms' % (value,) elif abs_value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) else: return '%d m' % (value / ONE_MINUTE,)
Fix for negative values in duration
Fix for negative values in duration
Python
apache-2.0
bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes
def duration(value): ONE_SECOND = 1000 ONE_MINUTE = ONE_SECOND * 60 if not value: return '0 s' + abs_value = abs(value) + - if value < 3 * ONE_SECOND: + if abs_value < 3 * ONE_SECOND: return '%d ms' % (value,) - elif value < 5 * ONE_MINUTE: + elif abs_value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) else: return '%d m' % (value / ONE_MINUTE,)
Fix for negative values in duration
## Code Before: def duration(value): ONE_SECOND = 1000 ONE_MINUTE = ONE_SECOND * 60 if not value: return '0 s' if value < 3 * ONE_SECOND: return '%d ms' % (value,) elif value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) else: return '%d m' % (value / ONE_MINUTE,) ## Instruction: Fix for negative values in duration ## Code After: def duration(value): ONE_SECOND = 1000 ONE_MINUTE = ONE_SECOND * 60 if not value: return '0 s' abs_value = abs(value) if abs_value < 3 * ONE_SECOND: return '%d ms' % (value,) elif abs_value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) else: return '%d m' % (value / ONE_MINUTE,)
# ... existing code ... abs_value = abs(value) if abs_value < 3 * ONE_SECOND: return '%d ms' % (value,) elif abs_value < 5 * ONE_MINUTE: return '%d s' % (value / ONE_SECOND,) # ... rest of the code ...
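What the abs() guard buys, concretely: a negative duration (clock skew, reversed timestamps) now picks its unit by magnitude while the sign stays on the formatted value. Runnable inside the repository:

from changes.utils.times import duration

assert duration(2500) == '2500 ms'
assert duration(4000) == '4 s'
assert duration(-4000) == '-4 s'  # before the fix this fell through to '-4000 ms'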
e50fc12459e6ff77864fe499b512a57e89f7ead2
pi_control_service/gpio_service.py
pi_control_service/gpio_service.py
from rpc import RPCService from pi_pin_manager import PinManager ALLOWED_ACTIONS = ('on', 'off', 'read') class GPIOService(RPCService): def __init__(self, rabbit_url, device_key, pin_config): self.pins = PinManager(config_file=pin_config) super(GPIOService, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key, request_action=self._perform_gpio_action) def _perform_gpio_action(self, instruction): result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"} if instruction['action'] not in ALLOWED_ACTIONS: result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS)) return result try: result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin'])) result['error'] = 0 except ValueError: result['response'] = "'pin' value must be an integer" except: pass return result def stop(self): self.pins.cleanup() super(GPIOService, self).stop()
from rpc import RPCService from pi_pin_manager import PinManager ALLOWED_ACTIONS = ('on', 'off', 'read') class GPIOService(RPCService): def __init__(self, rabbit_url, device_key, pin_config): self.pins = PinManager(config_file=pin_config) super(GPIOService, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key, request_action=self._perform_gpio_action) def _perform_gpio_action(self, instruction): result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"} if instruction['action'] not in ALLOWED_ACTIONS: result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS)) return result try: result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin'])) result['error'] = 0 except ValueError: result['response'] = "'pin' value must be an integer" except Exception as e: result['response'] = e.message return result def stop(self): self.pins.cleanup() super(GPIOService, self).stop()
Send exception message in response
Send exception message in response
Python
mit
projectweekend/Pi-Control-Service,HydAu/ProjectWeekds_Pi-Control-Service
from rpc import RPCService from pi_pin_manager import PinManager ALLOWED_ACTIONS = ('on', 'off', 'read') class GPIOService(RPCService): def __init__(self, rabbit_url, device_key, pin_config): self.pins = PinManager(config_file=pin_config) super(GPIOService, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key, request_action=self._perform_gpio_action) def _perform_gpio_action(self, instruction): result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"} if instruction['action'] not in ALLOWED_ACTIONS: result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS)) return result try: result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin'])) result['error'] = 0 except ValueError: result['response'] = "'pin' value must be an integer" - except: - pass + except Exception as e: + result['response'] = e.message return result def stop(self): self.pins.cleanup() super(GPIOService, self).stop()
Send exception message in response
## Code Before: from rpc import RPCService from pi_pin_manager import PinManager ALLOWED_ACTIONS = ('on', 'off', 'read') class GPIOService(RPCService): def __init__(self, rabbit_url, device_key, pin_config): self.pins = PinManager(config_file=pin_config) super(GPIOService, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key, request_action=self._perform_gpio_action) def _perform_gpio_action(self, instruction): result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"} if instruction['action'] not in ALLOWED_ACTIONS: result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS)) return result try: result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin'])) result['error'] = 0 except ValueError: result['response'] = "'pin' value must be an integer" except: pass return result def stop(self): self.pins.cleanup() super(GPIOService, self).stop() ## Instruction: Send exception message in response ## Code After: from rpc import RPCService from pi_pin_manager import PinManager ALLOWED_ACTIONS = ('on', 'off', 'read') class GPIOService(RPCService): def __init__(self, rabbit_url, device_key, pin_config): self.pins = PinManager(config_file=pin_config) super(GPIOService, self).__init__( rabbit_url=rabbit_url, queue_name='gpio_service', device_key=device_key, request_action=self._perform_gpio_action) def _perform_gpio_action(self, instruction): result = {'error': 1, 'pin': instruction['pin'], 'response': "An error occurred"} if instruction['action'] not in ALLOWED_ACTIONS: result['response'] = "'action' must be one of: {0}".format(', '.join(ALLOWED_ACTIONS)) return result try: result['response'] = getattr(self.pins, instruction['action'])(int(instruction['pin'])) result['error'] = 0 except ValueError: result['response'] = "'pin' value must be an integer" except Exception as e: result['response'] = e.message return result def stop(self): self.pins.cleanup() super(GPIOService, self).stop()
... result['response'] = "'pin' value must be an integer" except Exception as e: result['response'] = e.message return result ...
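One portability caveat worth flagging on the fix: e.message is a Python 2 idiom that was removed from BaseException in Python 3, where the portable spelling is str(e). A small Python 3 safe version of the same surface-the-exception-text move; the failure is invented:

try:
    raise RuntimeError('pin 7 is not configured')
except Exception as e:
    result = {'error': 1, 'response': str(e)}  # str(e), not e.message
print(result)  # {'error': 1, 'response': 'pin 7 is not configured'}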
a59d756072a72e3110875058729e15f17a4b7f8a
bibliopixel/util/log_errors.py
bibliopixel/util/log_errors.py
from .. util import class_name, log class LogErrors: """ Wraps a function call to catch and report exceptions. """ def __init__(self, function, max_errors=-1): """ :param function: the function to wrap :param int max_errors: if ``max_errors`` is non-zero, then only the first ``max_errors`` error messages are printed """ self.function = function self.max_errors = max_errors self.errors = 0 def __call__(self, *args, **kwds): """ Calls ``self.function`` with the given arguments and keywords, and returns its value - or if the call throws an exception, returns None. If is ``self.max_errors`` is `0`, all the exceptions are reported, otherwise just the first ``self.max_errors`` are. """ try: return self.function(*args, **kwds) except Exception as e: args = (class_name.class_name(e),) + e.args raise self.errors += 1 if self.max_errors < 0 or self.errors <= self.max_errors: log.error(str(args)) elif self.errors == self.max_errors + 1: log.error('Exceeded max_errors of %d', self.max_errors)
from .. util import class_name, log class LogErrors: """ Wraps a function call to catch and report exceptions. """ def __init__(self, function, max_errors=-1): """ :param function: the function to wrap :param int max_errors: if ``max_errors`` is non-zero, then only the first ``max_errors`` error messages are printed """ self.function = function self.max_errors = max_errors self.errors = 0 def __call__(self, *args, **kwds): """ Calls ``self.function`` with the given arguments and keywords, and returns its value - or if the call throws an exception, returns None. If is ``self.max_errors`` is `0`, all the exceptions are reported, otherwise just the first ``self.max_errors`` are. """ try: return self.function(*args, **kwds) except Exception as e: args = (class_name.class_name(e),) + e.args self.errors += 1 if self.max_errors < 0 or self.errors <= self.max_errors: log.error(str(args)) elif self.errors == self.max_errors + 1: log.error('Exceeded max_errors of %d', self.max_errors)
Fix log_error so it now catches exceptions
Fix log_error so it now catches exceptions * This got accidentally disabled
Python
mit
rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel
from .. util import class_name, log class LogErrors: """ Wraps a function call to catch and report exceptions. """ def __init__(self, function, max_errors=-1): """ :param function: the function to wrap :param int max_errors: if ``max_errors`` is non-zero, then only the first ``max_errors`` error messages are printed """ self.function = function self.max_errors = max_errors self.errors = 0 def __call__(self, *args, **kwds): """ Calls ``self.function`` with the given arguments and keywords, and returns its value - or if the call throws an exception, returns None. If is ``self.max_errors`` is `0`, all the exceptions are reported, otherwise just the first ``self.max_errors`` are. """ try: return self.function(*args, **kwds) except Exception as e: args = (class_name.class_name(e),) + e.args - raise self.errors += 1 if self.max_errors < 0 or self.errors <= self.max_errors: log.error(str(args)) elif self.errors == self.max_errors + 1: log.error('Exceeded max_errors of %d', self.max_errors)
Fix log_error so it now catches exceptions
## Code Before: from .. util import class_name, log class LogErrors: """ Wraps a function call to catch and report exceptions. """ def __init__(self, function, max_errors=-1): """ :param function: the function to wrap :param int max_errors: if ``max_errors`` is non-zero, then only the first ``max_errors`` error messages are printed """ self.function = function self.max_errors = max_errors self.errors = 0 def __call__(self, *args, **kwds): """ Calls ``self.function`` with the given arguments and keywords, and returns its value - or if the call throws an exception, returns None. If is ``self.max_errors`` is `0`, all the exceptions are reported, otherwise just the first ``self.max_errors`` are. """ try: return self.function(*args, **kwds) except Exception as e: args = (class_name.class_name(e),) + e.args raise self.errors += 1 if self.max_errors < 0 or self.errors <= self.max_errors: log.error(str(args)) elif self.errors == self.max_errors + 1: log.error('Exceeded max_errors of %d', self.max_errors) ## Instruction: Fix log_error so it now catches exceptions ## Code After: from .. util import class_name, log class LogErrors: """ Wraps a function call to catch and report exceptions. """ def __init__(self, function, max_errors=-1): """ :param function: the function to wrap :param int max_errors: if ``max_errors`` is non-zero, then only the first ``max_errors`` error messages are printed """ self.function = function self.max_errors = max_errors self.errors = 0 def __call__(self, *args, **kwds): """ Calls ``self.function`` with the given arguments and keywords, and returns its value - or if the call throws an exception, returns None. If is ``self.max_errors`` is `0`, all the exceptions are reported, otherwise just the first ``self.max_errors`` are. """ try: return self.function(*args, **kwds) except Exception as e: args = (class_name.class_name(e),) + e.args self.errors += 1 if self.max_errors < 0 or self.errors <= self.max_errors: log.error(str(args)) elif self.errors == self.max_errors + 1: log.error('Exceeded max_errors of %d', self.max_errors)
... args = (class_name.class_name(e),) + e.args ...
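How the wrapper behaves once the stray raise is gone, sketched against the class above; the import runs inside the repository and flaky() is invented:

from bibliopixel.util.log_errors import LogErrors

def flaky(x):
    return 1.0 / x

safe = LogErrors(flaky, max_errors=2)
safe(2)  # -> 0.5
safe(0)  # logs the exception name and args, returns None
safe(0)  # second of the two allowed error reports
safe(0)  # now logs 'Exceeded max_errors of 2' instead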
5553dd2aba90749fdedda55067c2010cf1522f54
mesobox/boxes.py
mesobox/boxes.py
from django.conf import settings import importlib # Merge two lots of mesobox-compatible context additions def merge_context_additions(additions): context = {} boxes = {} for c in additions: try: context.update(c.get("context")) except TypeError: pass try: for k, v in c.get("boxes").items(): if k in boxes: boxes[k].append(v) else: boxes[k] = v except TypeError: pass except AttributeError: pass return {"context": context, "boxes": boxes} def context_processor(request): additions = {} # Get the boxes and accompanying context additions from all the installed apps. for app in settings.INSTALLED_APPS: print(app) try: module = importlib.import_module(app+".boxes") except ImportError: continue # Run each function now. for b in module.BOX_INCLUDES: b_func = getattr(module, b) if not b_func: raise Exception("Method %s not implemented in module %s" % (b, app)) additions = merge_context_additions([additions, b_func(request)]) # Merge boxes down to being part of the context additions dict now they have all been assembled result = additions['context'] result['boxes'] = additions['boxes'] return result
from django.conf import settings import importlib # Merge two lots of mesobox-compatible context additions def merge_context_additions(additions): context = {} boxes = {} for c in additions: try: context.update(c.get("context")) except TypeError: pass try: for k, v in c.get("boxes").items(): if k in boxes: boxes[k].append(v) else: boxes[k] = v except TypeError: pass except AttributeError: pass return {"context": context, "boxes": boxes} def context_processor(request): additions = {} # Get the boxes and accompanying context additions from all the installed apps. for app in settings.INSTALLED_APPS: try: module = importlib.import_module(app+".boxes") except ImportError: continue # Run each function now. for b in module.BOX_INCLUDES: b_func = getattr(module, b) if not b_func: raise Exception("Method %s not implemented in module %s" % (b, app)) additions = merge_context_additions([additions, b_func(request)]) # Merge boxes down to being part of the context additions dict now they have all been assembled result = additions['context'] result['boxes'] = additions['boxes'] return result
Remove a debugging print that should never have been left there anyway.
Remove a debugging print that should never have been left there anyway.
Python
mit
grundleborg/mesosphere
from django.conf import settings import importlib # Merge two lots of mesobox-compatible context additions def merge_context_additions(additions): context = {} boxes = {} for c in additions: try: context.update(c.get("context")) except TypeError: pass try: for k, v in c.get("boxes").items(): if k in boxes: boxes[k].append(v) else: boxes[k] = v except TypeError: pass except AttributeError: pass return {"context": context, "boxes": boxes} def context_processor(request): additions = {} # Get the boxes and accompanying context additions from all the installed apps. for app in settings.INSTALLED_APPS: - print(app) try: module = importlib.import_module(app+".boxes") except ImportError: continue # Run each function now. for b in module.BOX_INCLUDES: b_func = getattr(module, b) if not b_func: raise Exception("Method %s not implemented in module %s" % (b, app)) additions = merge_context_additions([additions, b_func(request)]) # Merge boxes down to being part of the context additions dict now they have all been assembled result = additions['context'] result['boxes'] = additions['boxes'] return result
Remove a debugging print that should never have been left there anyway.
## Code Before: from django.conf import settings import importlib # Merge two lots of mesobox-compatible context additions def merge_context_additions(additions): context = {} boxes = {} for c in additions: try: context.update(c.get("context")) except TypeError: pass try: for k, v in c.get("boxes").items(): if k in boxes: boxes[k].append(v) else: boxes[k] = v except TypeError: pass except AttributeError: pass return {"context": context, "boxes": boxes} def context_processor(request): additions = {} # Get the boxes and accompanying context additions from all the installed apps. for app in settings.INSTALLED_APPS: print(app) try: module = importlib.import_module(app+".boxes") except ImportError: continue # Run each function now. for b in module.BOX_INCLUDES: b_func = getattr(module, b) if not b_func: raise Exception("Method %s not implemented in module %s" % (b, app)) additions = merge_context_additions([additions, b_func(request)]) # Merge boxes down to being part of the context additions dict now they have all been assembled result = additions['context'] result['boxes'] = additions['boxes'] return result ## Instruction: Remove a debugging print that should never have been left there anyway. ## Code After: from django.conf import settings import importlib # Merge two lots of mesobox-compatible context additions def merge_context_additions(additions): context = {} boxes = {} for c in additions: try: context.update(c.get("context")) except TypeError: pass try: for k, v in c.get("boxes").items(): if k in boxes: boxes[k].append(v) else: boxes[k] = v except TypeError: pass except AttributeError: pass return {"context": context, "boxes": boxes} def context_processor(request): additions = {} # Get the boxes and accompanying context additions from all the installed apps. for app in settings.INSTALLED_APPS: try: module = importlib.import_module(app+".boxes") except ImportError: continue # Run each function now. for b in module.BOX_INCLUDES: b_func = getattr(module, b) if not b_func: raise Exception("Method %s not implemented in module %s" % (b, app)) additions = merge_context_additions([additions, b_func(request)]) # Merge boxes down to being part of the context additions dict now they have all been assembled result = additions['context'] result['boxes'] = additions['boxes'] return result
... for app in settings.INSTALLED_APPS: try: ...
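What an app plugs into this processor: a boxes.py module exposing a BOX_INCLUDES list of function names, each taking the request and returning the {'context': ..., 'boxes': ...} shape that merge_context_additions consumes. An illustrative module; the app and box names are invented:

# myapp/boxes.py
BOX_INCLUDES = ['news_box']

def news_box(request):
    return {
        'context': {'news_count': 3},
        'boxes': {'sidebar': ['<div>Latest news</div>']},
    }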
ff2d9b276928d2bf06ae81b1fa243ee2816cd694
seawater/__init__.py
seawater/__init__.py
__all__ = ["csiro", "extras"] __authors__ = 'Filipe Fernandes' __created_ = "14-Jan-2010" __email__ = "[email protected]" __license__ = "MIT" __maintainer__ = "Filipe Fernandes" __modified__ = "16-Mar-2013" __status__ = "Production" __version__ = "2.0.0" import csiro import extras
from csiro import *
Update to reflect some small re-factoring.
Update to reflect some small re-factoring.
Python
mit
ocefpaf/python-seawater,pyoceans/python-seawater,pyoceans/python-seawater,ocefpaf/python-seawater
+ from csiro import * - __all__ = ["csiro", "extras"] - __authors__ = 'Filipe Fernandes' - __created_ = "14-Jan-2010" - __email__ = "[email protected]" - __license__ = "MIT" - __maintainer__ = "Filipe Fernandes" - __modified__ = "16-Mar-2013" - __status__ = "Production" - __version__ = "2.0.0" - import csiro - import extras -
Update to reflect some small re-factoring.
## Code Before: __all__ = ["csiro", "extras"] __authors__ = 'Filipe Fernandes' __created_ = "14-Jan-2010" __email__ = "[email protected]" __license__ = "MIT" __maintainer__ = "Filipe Fernandes" __modified__ = "16-Mar-2013" __status__ = "Production" __version__ = "2.0.0" import csiro import extras ## Instruction: Update to reflect some small re-factoring. ## Code After: from csiro import *
... from csiro import * ...
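The one-line __init__.py is the classic namespace-flattening re-export: callers write seawater.func() instead of seawater.csiro.func(). Note that from csiro import * is a Python 2 implicit relative import; the Python 3 spelling of the same re-export is shown in the comment:

# Python 2, as committed:
from csiro import *
# Python 3 equivalent (explicit relative import):
# from .csiro import *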
d546d6901859a5fee8a16ffea6df560ecbb1e280
tests/unit_tests.py
tests/unit_tests.py
import os import sys import unittest parentDir = os.path.join(os.path.dirname(__file__), "../") sys.path.insert(0, parentDir) from oxyfloat import OxyFloat class DataTest(unittest.TestCase): def setUp(self): self.of = OxyFloat() def test_get_oxyfloats(self): float_list = self.of.get_oxy_floats() print len(float_list) self.assertNotEqual(len(float_list), 0) if __name__ == '__main__': unittest.main()
import os import sys import unittest parentDir = os.path.join(os.path.dirname(__file__), "../") sys.path.insert(0, parentDir) from oxyfloat import OxyFloat class DataTest(unittest.TestCase): def setUp(self): self.of = OxyFloat() def test_get_oxyfloats(self): self.oga_floats = self.of.get_oxy_floats() self.assertNotEqual(len(self.oga_floats), 0) def _get_dac_urls(self): # Testing with a float that has data oga_floats = ['1900650'] for dac_url in self.of.get_dac_urls(oga_floats): self.dac_url = dac_url self.assertTrue(self.dac_url.startswith('http')) break def _get_profile_opendap_urls(self): for profile_url in self.of.get_profile_opendap_urls(self.dac_url): self.profile_url = profile_url break def _get_profile_data(self): d = self.of.get_profile_data(self.profile_url) self.assertNotEqual(len(d), 0) def test_read_data(self): # Methods need to be called in order self._get_dac_urls() self._get_profile_opendap_urls() self._get_profile_data() if __name__ == '__main__': unittest.main()
Add tests for reading profile data
Add tests for reading profile data
Python
mit
biofloat/biofloat,MBARIMike/biofloat,biofloat/biofloat,MBARIMike/biofloat,MBARIMike/oxyfloat,MBARIMike/oxyfloat
import os import sys import unittest parentDir = os.path.join(os.path.dirname(__file__), "../") sys.path.insert(0, parentDir) from oxyfloat import OxyFloat class DataTest(unittest.TestCase): def setUp(self): self.of = OxyFloat() def test_get_oxyfloats(self): - float_list = self.of.get_oxy_floats() + self.oga_floats = self.of.get_oxy_floats() - print len(float_list) - self.assertNotEqual(len(float_list), 0) + self.assertNotEqual(len(self.oga_floats), 0) + def _get_dac_urls(self): + # Testing with a float that has data + oga_floats = ['1900650'] + for dac_url in self.of.get_dac_urls(oga_floats): + self.dac_url = dac_url + self.assertTrue(self.dac_url.startswith('http')) + break + + def _get_profile_opendap_urls(self): + for profile_url in self.of.get_profile_opendap_urls(self.dac_url): + self.profile_url = profile_url + break + + def _get_profile_data(self): + d = self.of.get_profile_data(self.profile_url) + self.assertNotEqual(len(d), 0) + + def test_read_data(self): + # Methods need to be called in order + self._get_dac_urls() + self._get_profile_opendap_urls() + self._get_profile_data() if __name__ == '__main__': unittest.main()
Add tests for reading profile data
## Code Before: import os import sys import unittest parentDir = os.path.join(os.path.dirname(__file__), "../") sys.path.insert(0, parentDir) from oxyfloat import OxyFloat class DataTest(unittest.TestCase): def setUp(self): self.of = OxyFloat() def test_get_oxyfloats(self): float_list = self.of.get_oxy_floats() print len(float_list) self.assertNotEqual(len(float_list), 0) if __name__ == '__main__': unittest.main() ## Instruction: Add tests for reading profile data ## Code After: import os import sys import unittest parentDir = os.path.join(os.path.dirname(__file__), "../") sys.path.insert(0, parentDir) from oxyfloat import OxyFloat class DataTest(unittest.TestCase): def setUp(self): self.of = OxyFloat() def test_get_oxyfloats(self): self.oga_floats = self.of.get_oxy_floats() self.assertNotEqual(len(self.oga_floats), 0) def _get_dac_urls(self): # Testing with a float that has data oga_floats = ['1900650'] for dac_url in self.of.get_dac_urls(oga_floats): self.dac_url = dac_url self.assertTrue(self.dac_url.startswith('http')) break def _get_profile_opendap_urls(self): for profile_url in self.of.get_profile_opendap_urls(self.dac_url): self.profile_url = profile_url break def _get_profile_data(self): d = self.of.get_profile_data(self.profile_url) self.assertNotEqual(len(d), 0) def test_read_data(self): # Methods need to be called in order self._get_dac_urls() self._get_profile_opendap_urls() self._get_profile_data() if __name__ == '__main__': unittest.main()
... def test_get_oxyfloats(self): self.oga_floats = self.of.get_oxy_floats() self.assertNotEqual(len(self.oga_floats), 0) def _get_dac_urls(self): # Testing with a float that has data oga_floats = ['1900650'] for dac_url in self.of.get_dac_urls(oga_floats): self.dac_url = dac_url self.assertTrue(self.dac_url.startswith('http')) break def _get_profile_opendap_urls(self): for profile_url in self.of.get_profile_opendap_urls(self.dac_url): self.profile_url = profile_url break def _get_profile_data(self): d = self.of.get_profile_data(self.profile_url) self.assertNotEqual(len(d), 0) def test_read_data(self): # Methods need to be called in order self._get_dac_urls() self._get_profile_opendap_urls() self._get_profile_data() ...
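A detail that makes the chained-helper style work: unittest only auto-discovers methods whose names start with 'test', so the underscore-prefixed helpers never run on their own and test_read_data controls their order. The mechanism in miniature, self-contained:

import unittest

class Demo(unittest.TestCase):
    def _fetch(self):  # underscore prefix: not collected by the runner
        self.value = 42

    def test_chain(self):  # collected; drives the helper in order
        self._fetch()
        self.assertEqual(self.value, 42)

if __name__ == '__main__':
    unittest.main()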
ae0e2a481f91e94cf05ac2df63f1d66f76a5e442
indra/preassembler/grounding_mapper/gilda.py
indra/preassembler/grounding_mapper/gilda.py
import requests from .mapper import GroundingMapper grounding_service_url = 'http://grounding.indra.bio/ground' def ground_statements(stmts): """Set grounding for Agents in a list of Statements using Gilda. This function modifies the original Statements/Agents in place. Parameters ---------- stmts : list[indra.statements.Statements] A list of Statements to ground """ for stmt in stmts: if stmt.evidence and stmt.evidence[0].text: context = stmt.evidence[0].text else: context = None for agent in stmt.agent_list(): if agent is not None and 'TEXT' in agent.db_refs: txt = agent.db_refs['TEXT'] resp = requests.post(grounding_service_url, json={'text': txt, 'context': context}) results = resp.json() if results: db_refs = {'TEXT': txt, results[0]['term']['db']: results[0]['term']['id']} agent.db_refs = db_refs GroundingMapper.standardize_agent_name(agent, standardize_refs=True)
import requests from .mapper import GroundingMapper grounding_service_url = 'http://grounding.indra.bio' def get_gilda_models(): """Return a list of strings for which Gilda has a disambiguation model. Returns ------- list[str] A list of entity strings. """ res = requests.post(grounding_service_url + '/models') models = res.json() return models def ground_statement(stmt): """Set grounding for Agents in a given Statement using Gilda. This function modifies the original Statement/Agents in place. Parameters ---------- stmt : indra.statements.Statement A Statement to ground """ if stmt.evidence and stmt.evidence[0].text: context = stmt.evidence[0].text else: context = None for agent in stmt.agent_list(): if agent is not None and 'TEXT' in agent.db_refs: txt = agent.db_refs['TEXT'] resp = requests.post(grounding_service_url + '/ground', json={'text': txt, 'context': context}) results = resp.json() if results: db_refs = {'TEXT': txt, results[0]['term']['db']: results[0]['term']['id']} agent.db_refs = db_refs GroundingMapper.standardize_agent_name(agent, standardize_refs=True) def ground_statements(stmts): """Set grounding for Agents in a list of Statements using Gilda. This function modifies the original Statements/Agents in place. Parameters ---------- stmts : list[indra.statements.Statement] A list of Statements to ground """ for stmt in stmts: ground_statement(stmt)
Refactor Gilda module and add function to get models
Refactor Gilda module and add function to get models
Python
bsd-2-clause
bgyori/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra
import requests from .mapper import GroundingMapper - grounding_service_url = 'http://grounding.indra.bio/ground' + grounding_service_url = 'http://grounding.indra.bio' + + + def get_gilda_models(): + """Return a list of strings for which Gilda has a disambiguation model. + + Returns + ------- + list[str] + A list of entity strings. + """ + res = requests.post(grounding_service_url + '/models') + models = res.json() + return models + + + def ground_statement(stmt): + """Set grounding for Agents in a given Statement using Gilda. + + This function modifies the original Statement/Agents in place. + + Parameters + ---------- + stmt : indra.statements.Statement + A Statement to ground + """ + if stmt.evidence and stmt.evidence[0].text: + context = stmt.evidence[0].text + else: + context = None + for agent in stmt.agent_list(): + if agent is not None and 'TEXT' in agent.db_refs: + txt = agent.db_refs['TEXT'] + resp = requests.post(grounding_service_url + '/ground', + json={'text': txt, + 'context': context}) + results = resp.json() + if results: + db_refs = {'TEXT': txt, + results[0]['term']['db']: + results[0]['term']['id']} + agent.db_refs = db_refs + GroundingMapper.standardize_agent_name(agent, + standardize_refs=True) def ground_statements(stmts): """Set grounding for Agents in a list of Statements using Gilda. This function modifies the original Statements/Agents in place. Parameters ---------- - stmts : list[indra.statements.Statements] + stmts : list[indra.statements.Statement] A list of Statements to ground """ for stmt in stmts: + ground_statement(stmt) - if stmt.evidence and stmt.evidence[0].text: - context = stmt.evidence[0].text - else: - context = None - for agent in stmt.agent_list(): - if agent is not None and 'TEXT' in agent.db_refs: - txt = agent.db_refs['TEXT'] - resp = requests.post(grounding_service_url, - json={'text': txt, - 'context': context}) - results = resp.json() - if results: - db_refs = {'TEXT': txt, - results[0]['term']['db']: - results[0]['term']['id']} - agent.db_refs = db_refs - GroundingMapper.standardize_agent_name(agent, - standardize_refs=True)
Refactor Gilda module and add function to get models
## Code Before:
import requests
from .mapper import GroundingMapper

grounding_service_url = 'http://grounding.indra.bio/ground'


def ground_statements(stmts):
    """Set grounding for Agents in a list of Statements using Gilda.

    This function modifies the original Statements/Agents in place.

    Parameters
    ----------
    stmts : list[indra.statements.Statements]
        A list of Statements to ground
    """
    for stmt in stmts:
        if stmt.evidence and stmt.evidence[0].text:
            context = stmt.evidence[0].text
        else:
            context = None
        for agent in stmt.agent_list():
            if agent is not None and 'TEXT' in agent.db_refs:
                txt = agent.db_refs['TEXT']
                resp = requests.post(grounding_service_url,
                                     json={'text': txt,
                                           'context': context})
                results = resp.json()
                if results:
                    db_refs = {'TEXT': txt,
                               results[0]['term']['db']:
                                   results[0]['term']['id']}
                    agent.db_refs = db_refs
                    GroundingMapper.standardize_agent_name(agent,
                                                           standardize_refs=True)
## Instruction:
Refactor Gilda module and add function to get models
## Code After:
import requests
from .mapper import GroundingMapper

grounding_service_url = 'http://grounding.indra.bio'


def get_gilda_models():
    """Return a list of strings for which Gilda has a disambiguation model.

    Returns
    -------
    list[str]
        A list of entity strings.
    """
    res = requests.post(grounding_service_url + '/models')
    models = res.json()
    return models


def ground_statement(stmt):
    """Set grounding for Agents in a given Statement using Gilda.

    This function modifies the original Statement/Agents in place.

    Parameters
    ----------
    stmt : indra.statements.Statement
        A Statement to ground
    """
    if stmt.evidence and stmt.evidence[0].text:
        context = stmt.evidence[0].text
    else:
        context = None
    for agent in stmt.agent_list():
        if agent is not None and 'TEXT' in agent.db_refs:
            txt = agent.db_refs['TEXT']
            resp = requests.post(grounding_service_url + '/ground',
                                 json={'text': txt,
                                       'context': context})
            results = resp.json()
            if results:
                db_refs = {'TEXT': txt,
                           results[0]['term']['db']:
                               results[0]['term']['id']}
                agent.db_refs = db_refs
                GroundingMapper.standardize_agent_name(agent,
                                                       standardize_refs=True)


def ground_statements(stmts):
    """Set grounding for Agents in a list of Statements using Gilda.

    This function modifies the original Statements/Agents in place.

    Parameters
    ----------
    stmts : list[indra.statements.Statement]
        A list of Statements to ground
    """
    for stmt in stmts:
        ground_statement(stmt)
# ... existing code ...
grounding_service_url = 'http://grounding.indra.bio'


def get_gilda_models():
    """Return a list of strings for which Gilda has a disambiguation model.

    Returns
    -------
    list[str]
        A list of entity strings.
    """
    res = requests.post(grounding_service_url + '/models')
    models = res.json()
    return models


def ground_statement(stmt):
    """Set grounding for Agents in a given Statement using Gilda.

    This function modifies the original Statement/Agents in place.

    Parameters
    ----------
    stmt : indra.statements.Statement
        A Statement to ground
    """
    if stmt.evidence and stmt.evidence[0].text:
        context = stmt.evidence[0].text
    else:
        context = None
    for agent in stmt.agent_list():
        if agent is not None and 'TEXT' in agent.db_refs:
            txt = agent.db_refs['TEXT']
            resp = requests.post(grounding_service_url + '/ground',
                                 json={'text': txt,
                                       'context': context})
            results = resp.json()
            if results:
                db_refs = {'TEXT': txt,
                           results[0]['term']['db']:
                               results[0]['term']['id']}
                agent.db_refs = db_refs
                GroundingMapper.standardize_agent_name(agent,
                                                       standardize_refs=True)
# ... modified code ...
    ----------
    stmts : list[indra.statements.Statement]
        A list of Statements to ground
...
    for stmt in stmts:
        ground_statement(stmt)
# ... rest of the code ...
939a96a93d959bf2c26da37adb672f5538c1f222
mmmpaste/db.py
mmmpaste/db.py
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from hashlib import md5

engine = create_engine("sqlite:///db/pastebin.db")
session = scoped_session(sessionmaker(bind = engine, autoflush = False))
Base = declarative_base(bind = engine)


def init_db():
    """
    Creates the database schema.  Import the models below to add them to the
    schema generation.  Nothing happens when the database already exists.
    """
    from mmmpaste.models import Paste
    Base.metadata.create_all()


def nuke_db():
    """
    Drop the bass.
    """
    from mmmpaste.models import Paste
    Base.metadata.drop_all()


def new_paste(content, filename = None):
    from mmmpaste.models import Paste, Content

    hash = md5(content).hexdigest()
    dupe = session.query(Content).filter_by(hash = hash).first()
    paste = Paste(Content(content), filename)

    if dupe is not None:
        paste.content = dupe

    session.add(paste)
    session.commit()


def get_paste(id):
    from mmmpaste.models import Paste
    return session.query(Paste).filter_by(id = id).first()
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from hashlib import md5

engine = create_engine("sqlite:///db/pastebin.db")
session = scoped_session(sessionmaker(bind = engine, autoflush = False))
Base = declarative_base(bind = engine)


def init_db():
    """
    Creates the database schema.  Import the models below to add them to the
    schema generation.  Nothing happens when the database already exists.
    """
    from mmmpaste.models import Paste
    Base.metadata.create_all()


def nuke_db():
    """
    Drop the bass.
    """
    from mmmpaste.models import Paste
    Base.metadata.drop_all()


def new_paste(content, filename = None):
    from mmmpaste.models import Paste, Content
    from mmmpaste.base62 import b62_encode

    hash = md5(content).hexdigest()
    dupe = session.query(Content).filter_by(hash = hash).first()
    paste = Paste(content, filename)

    if dupe is not None:
        paste.content = dupe

    session.add(paste)
    session.commit()

    paste.id_b62 = b62_encode(paste.id)
    session.commit()
    return paste.id_b62


def get_paste(id_b62):
    from mmmpaste.models import Paste
    return session.query(Paste).filter_by(id_b62 = id_b62).first()
Update base 62 id after paste creation.
Update base 62 id after paste creation.
Python
bsd-2-clause
ryanc/mmmpaste,ryanc/mmmpaste
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from hashlib import md5

engine = create_engine("sqlite:///db/pastebin.db")
session = scoped_session(sessionmaker(bind = engine, autoflush = False))
Base = declarative_base(bind = engine)


def init_db():
    """
    Creates the database schema.  Import the models below to add them to the
    schema generation.  Nothing happens when the database already exists.
    """
    from mmmpaste.models import Paste
    Base.metadata.create_all()


def nuke_db():
    """
    Drop the bass.
    """
    from mmmpaste.models import Paste
    Base.metadata.drop_all()


def new_paste(content, filename = None):
    from mmmpaste.models import Paste, Content
+     from mmmpaste.base62 import b62_encode

    hash = md5(content).hexdigest()
    dupe = session.query(Content).filter_by(hash = hash).first()
-     paste = Paste(Content(content), filename)
+     paste = Paste(content, filename)

    if dupe is not None:
        paste.content = dupe

    session.add(paste)
    session.commit()

+     paste.id_b62 = b62_encode(paste.id)
+     session.commit()
+     return paste.id_b62
+
+
- def get_paste(id):
+ def get_paste(id_b62):
    from mmmpaste.models import Paste
-     return session.query(Paste).filter_by(id = id).first()
+     return session.query(Paste).filter_by(id_b62 = id_b62).first()
Update base 62 id after paste creation.
## Code Before:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from hashlib import md5

engine = create_engine("sqlite:///db/pastebin.db")
session = scoped_session(sessionmaker(bind = engine, autoflush = False))
Base = declarative_base(bind = engine)


def init_db():
    """
    Creates the database schema.  Import the models below to add them to the
    schema generation.  Nothing happens when the database already exists.
    """
    from mmmpaste.models import Paste
    Base.metadata.create_all()


def nuke_db():
    """
    Drop the bass.
    """
    from mmmpaste.models import Paste
    Base.metadata.drop_all()


def new_paste(content, filename = None):
    from mmmpaste.models import Paste, Content

    hash = md5(content).hexdigest()
    dupe = session.query(Content).filter_by(hash = hash).first()
    paste = Paste(Content(content), filename)

    if dupe is not None:
        paste.content = dupe

    session.add(paste)
    session.commit()


def get_paste(id):
    from mmmpaste.models import Paste
    return session.query(Paste).filter_by(id = id).first()
## Instruction:
Update base 62 id after paste creation.
## Code After:
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker

from hashlib import md5

engine = create_engine("sqlite:///db/pastebin.db")
session = scoped_session(sessionmaker(bind = engine, autoflush = False))
Base = declarative_base(bind = engine)


def init_db():
    """
    Creates the database schema.  Import the models below to add them to the
    schema generation.  Nothing happens when the database already exists.
    """
    from mmmpaste.models import Paste
    Base.metadata.create_all()


def nuke_db():
    """
    Drop the bass.
    """
    from mmmpaste.models import Paste
    Base.metadata.drop_all()


def new_paste(content, filename = None):
    from mmmpaste.models import Paste, Content
    from mmmpaste.base62 import b62_encode

    hash = md5(content).hexdigest()
    dupe = session.query(Content).filter_by(hash = hash).first()
    paste = Paste(content, filename)

    if dupe is not None:
        paste.content = dupe

    session.add(paste)
    session.commit()

    paste.id_b62 = b62_encode(paste.id)
    session.commit()
    return paste.id_b62


def get_paste(id_b62):
    from mmmpaste.models import Paste
    return session.query(Paste).filter_by(id_b62 = id_b62).first()
# ... existing code ...
    from mmmpaste.models import Paste, Content
    from mmmpaste.base62 import b62_encode
# ... modified code ...
    dupe = session.query(Content).filter_by(hash = hash).first()
    paste = Paste(content, filename)
...
    paste.id_b62 = b62_encode(paste.id)
    session.commit()
    return paste.id_b62


def get_paste(id_b62):
    from mmmpaste.models import Paste
    return session.query(Paste).filter_by(id_b62 = id_b62).first()
# ... rest of the code ...
8d3931fd5effabf9c5d56cb03ae15630ae984963
postalcodes_mexico/cli.py
postalcodes_mexico/cli.py
"""Console script for postalcodes_mexico.""" import sys import click @click.command() def main(args=None): """Console script for postalcodes_mexico.""" click.echo("Replace this message by putting your code into " "postalcodes_mexico.cli.main") click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
"""Console script for postalcodes_mexico.""" import sys import click from postalcodes_mexico import postalcodes_mexico @click.command() @click.argument('postalcode', type=str) def main(postalcode): """Console script for postalcodes_mexico.""" places = postalcodes_mexico.places(postalcode) click.echo(places) return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
Create simple CLI for the `places` function
Create simple CLI for the `places` function
Python
mit
FlowFX/postalcodes_mexico
"""Console script for postalcodes_mexico.""" import sys import click + from postalcodes_mexico import postalcodes_mexico + @click.command() - def main(args=None): + @click.argument('postalcode', type=str) + def main(postalcode): """Console script for postalcodes_mexico.""" + places = postalcodes_mexico.places(postalcode) + click.echo(places) - click.echo("Replace this message by putting your code into " - "postalcodes_mexico.cli.main") - click.echo("See click documentation at http://click.pocoo.org/") return 0 if __name__ == "__main__": sys.exit(main()) # pragma: no cover
Create simple CLI for the `places` function
## Code Before:
"""Console script for postalcodes_mexico."""
import sys
import click


@click.command()
def main(args=None):
    """Console script for postalcodes_mexico."""
    click.echo("Replace this message by putting your code into "
               "postalcodes_mexico.cli.main")
    click.echo("See click documentation at http://click.pocoo.org/")
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
## Instruction:
Create simple CLI for the `places` function
## Code After:
"""Console script for postalcodes_mexico."""
import sys
import click

from postalcodes_mexico import postalcodes_mexico


@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
    """Console script for postalcodes_mexico."""
    places = postalcodes_mexico.places(postalcode)
    click.echo(places)
    return 0


if __name__ == "__main__":
    sys.exit(main())  # pragma: no cover
...
from postalcodes_mexico import postalcodes_mexico
...
@click.command()
@click.argument('postalcode', type=str)
def main(postalcode):
    """Console script for postalcodes_mexico."""
    places = postalcodes_mexico.places(postalcode)
    click.echo(places)
    return 0
...
9560ccf476a887c20b2373eca52f38f186b6ed58
conanfile.py
conanfile.py
from conans import ConanFile, CMake

class NostalgiaConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable", "qt/5.14.1@bincrafters/stable", "sqlite3/3.31.0", "libiconv/1.16"
    generators = "cmake", "cmake_find_package", "cmake_paths"
    #default_options = {
    #    "sdl2:nas": False
    #}

    def requirements(self):
        pass
from conans import ConanFile, CMake

class NostalgiaConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
    generators = "cmake", "cmake_find_package", "cmake_paths"
    #default_options = {
    #    "sdl2:nas": False
    #}
Remove conan Qt, as it is currently being ignored
[nostalgia] Remove conan Qt, as it is currently being ignored
Python
mpl-2.0
wombatant/nostalgia,wombatant/nostalgia,wombatant/nostalgia
from conans import ConanFile, CMake

class NostalgiaConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
-     requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable", "qt/5.14.1@bincrafters/stable", "sqlite3/3.31.0", "libiconv/1.16"
+     requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
    generators = "cmake", "cmake_find_package", "cmake_paths"
    #default_options = {
    #    "sdl2:nas": False
    #}

-     def requirements(self):
-         pass
-
Remove conan Qt, as it is currently being ignored
## Code Before:
from conans import ConanFile, CMake

class NostalgiaConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable", "qt/5.14.1@bincrafters/stable", "sqlite3/3.31.0", "libiconv/1.16"
    generators = "cmake", "cmake_find_package", "cmake_paths"
    #default_options = {
    #    "sdl2:nas": False
    #}

    def requirements(self):
        pass
## Instruction:
Remove conan Qt, as it is currently being ignored
## Code After:
from conans import ConanFile, CMake

class NostalgiaConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
    generators = "cmake", "cmake_find_package", "cmake_paths"
    #default_options = {
    #    "sdl2:nas": False
    #}
// ... existing code ...
    settings = "os", "compiler", "build_type", "arch"
    requires = "jsoncpp/1.9.2", "sdl2/2.0.10@bincrafters/stable"
    generators = "cmake", "cmake_find_package", "cmake_paths"
// ... modified code ...
    #}
// ... rest of the code ...
baedff75f2b86f09368e3bd72b72e27bf887cc88
rotational-cipher/rotational_cipher.py
rotational-cipher/rotational_cipher.py
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    return "".join(rot_gen(s,n))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}


def rot_gen(s, n):
    rules = shift_rules(n)
    for ch in s:
        try:
            yield rules[ch]
        except KeyError:
            yield ch
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(map(lambda k: rules.get(k, k), s))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
Use lambda function with method
Use lambda function with method
Python
agpl-3.0
CubicComet/exercism-python-solutions
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
-     return "".join(rot_gen(s,n))
+     rules = shift_rules(n)
+     return "".join(map(lambda k: rules.get(k, k), s))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
-
-
- def rot_gen(s, n):
-     rules = shift_rules(n)
-     for ch in s:
-         try:
-             yield rules[ch]
-         except KeyError:
-             yield ch
-
Use lambda function with method
## Code Before:
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    return "".join(rot_gen(s,n))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}


def rot_gen(s, n):
    rules = shift_rules(n)
    for ch in s:
        try:
            yield rules[ch]
        except KeyError:
            yield ch
## Instruction:
Use lambda function with method
## Code After:
import string

UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase


def rotate(s, n):
    rules = shift_rules(n)
    return "".join(map(lambda k: rules.get(k, k), s))


def shift_rules(n):
    shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
# ... existing code ...
def rotate(s, n):
    rules = shift_rules(n)
    return "".join(map(lambda k: rules.get(k, k), s))
# ... modified code ...
    return {k:v for k,v in zip(UPPER+LOWER, shifted)}
# ... rest of the code ...
71c9235a7e48882fc8c1393e9527fea4531c536c
filter_plugins/fap.py
filter_plugins/fap.py
import ipaddress


def site_code(ipv4):
    # Verify IP address
    _ = ipaddress.ip_address(ipv4)

    segments = ipv4.split(".")
    return int(segments[1])


class FilterModule(object):
    def filters(self):
        return {"site_code": site_code}
import ipaddress


def site_code(ipv4):
    # Verify IP address
    _ = ipaddress.ip_address(ipv4)

    segments = ipv4.split(".")
    return int(segments[1])


# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
    if "https://" in repo:
        repo = repo.replace("https://", "")
        print(repo)
        type_, address, *_ = repo.split(":")
        (r, *_) = address.split("/")
        return "_".join([type_, r]).replace(".", "_")
    elif ":" not in repo:
        # Most likely a file path
        type_ = "disk"
        path = list(filter(None, repo.split("/")))
        if path[0] == "Volumes":
            return "_".join([type_, path[1]])

        return "_".join([type_, repo.replace("/", "_")])

    else:
        type_, *rest = repo.split(":")
        return "_".join([type_, rest[0]])


class FilterModule(object):
    def filters(self):
        return {
            "site_code": site_code,
            "restic_repo_friendly_name": restic_repo_friendly_name,
        }
Add really hacky way to reformat restic repos
Add really hacky way to reformat restic repos
Python
mit
kradalby/plays,kradalby/plays
import ipaddress


def site_code(ipv4):
    # Verify IP address
    _ = ipaddress.ip_address(ipv4)

    segments = ipv4.split(".")
    return int(segments[1])


+ # rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
+ # rclone:Jotta:storage.tjoda.fap.no
+ # /Volumes/storage/restic/kramacbook
+ def restic_repo_friendly_name(repo: str) -> str:
+     if "https://" in repo:
+         repo = repo.replace("https://", "")
+         print(repo)
+         type_, address, *_ = repo.split(":")
+         (r, *_) = address.split("/")
+         return "_".join([type_, r]).replace(".", "_")
+     elif ":" not in repo:
+         # Most likely a file path
+         type_ = "disk"
+         path = list(filter(None, repo.split("/")))
+         if path[0] == "Volumes":
+             return "_".join([type_, path[1]])
+
+         return "_".join([type_, repo.replace("/", "_")])
+
+     else:
+         type_, *rest = repo.split(":")
+         return "_".join([type_, rest[0]])
+
+
class FilterModule(object):
    def filters(self):
+         return {
-         return {"site_code": site_code}
+             "site_code": site_code,
+             "restic_repo_friendly_name": restic_repo_friendly_name,
+         }
Add really hacky way to reformat restic repos
## Code Before:
import ipaddress


def site_code(ipv4):
    # Verify IP address
    _ = ipaddress.ip_address(ipv4)

    segments = ipv4.split(".")
    return int(segments[1])


class FilterModule(object):
    def filters(self):
        return {"site_code": site_code}
## Instruction:
Add really hacky way to reformat restic repos
## Code After:
import ipaddress


def site_code(ipv4):
    # Verify IP address
    _ = ipaddress.ip_address(ipv4)

    segments = ipv4.split(".")
    return int(segments[1])


# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
    if "https://" in repo:
        repo = repo.replace("https://", "")
        print(repo)
        type_, address, *_ = repo.split(":")
        (r, *_) = address.split("/")
        return "_".join([type_, r]).replace(".", "_")
    elif ":" not in repo:
        # Most likely a file path
        type_ = "disk"
        path = list(filter(None, repo.split("/")))
        if path[0] == "Volumes":
            return "_".join([type_, path[1]])

        return "_".join([type_, repo.replace("/", "_")])

    else:
        type_, *rest = repo.split(":")
        return "_".join([type_, rest[0]])


class FilterModule(object):
    def filters(self):
        return {
            "site_code": site_code,
            "restic_repo_friendly_name": restic_repo_friendly_name,
        }
...
# rest:https://restic.storage.tjoda.fap.no/rpi1.ldn.fap.no
# rclone:Jotta:storage.tjoda.fap.no
# /Volumes/storage/restic/kramacbook
def restic_repo_friendly_name(repo: str) -> str:
    if "https://" in repo:
        repo = repo.replace("https://", "")
        print(repo)
        type_, address, *_ = repo.split(":")
        (r, *_) = address.split("/")
        return "_".join([type_, r]).replace(".", "_")
    elif ":" not in repo:
        # Most likely a file path
        type_ = "disk"
        path = list(filter(None, repo.split("/")))
        if path[0] == "Volumes":
            return "_".join([type_, path[1]])

        return "_".join([type_, repo.replace("/", "_")])

    else:
        type_, *rest = repo.split(":")
        return "_".join([type_, rest[0]])


class FilterModule(object):
...
    def filters(self):
        return {
            "site_code": site_code,
            "restic_repo_friendly_name": restic_repo_friendly_name,
        }
...
dceae6725d10a5d1af6287e1b684c651683d1750
runtests.py
runtests.py
import sys
from os.path import dirname, abspath

from django.conf import settings

if len(sys.argv) > 1 and 'postgres' in sys.argv:
    sys.argv.remove('postgres')
    db_engine = 'postgresql_psycopg2'
    db_name = 'test_main'
else:
    db_engine = 'sqlite3'
    db_name = ''

if not settings.configured:
    settings.configure(
        DATABASE_ENGINE = db_engine,
        DATABASE_NAME = db_name,
        INSTALLED_APPS = [
            'django.contrib.contenttypes',
            'genericm2m',
            'genericm2m.genericm2m_tests',
        ],
    )

from django.test.simple import run_tests


def runtests(*test_args):
    if not test_args:
        test_args = ['genericm2m_tests']
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
import sys
from os.path import dirname, abspath

import django
from django.conf import settings

if len(sys.argv) > 1 and 'postgres' in sys.argv:
    sys.argv.remove('postgres')
    db_engine = 'django.db.backends.postgresql_psycopg2'
    db_name = 'test_main'
else:
    db_engine = 'django.db.backends.sqlite3'
    db_name = ''

if not settings.configured:
    settings.configure(
        DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
        INSTALLED_APPS = [
            'django.contrib.contenttypes',
            'genericm2m',
            'genericm2m.genericm2m_tests',
        ],
    )

from django.test.utils import get_runner


def runtests(*test_args):
    if not test_args:
        test_args = ['genericm2m_tests']
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    TestRunner = get_runner(settings)
    test_runner = TestRunner(verbosity=1, interactive=True)
    failures = test_runner.run_tests(test_args)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
Allow tests to be run on 1.4
Allow tests to be run on 1.4
Python
mit
jayfk/django-generic-m2m,jayfk/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m,coleifer/django-generic-m2m
import sys
from os.path import dirname, abspath

+ import django
from django.conf import settings

if len(sys.argv) > 1 and 'postgres' in sys.argv:
    sys.argv.remove('postgres')
-     db_engine = 'postgresql_psycopg2'
+     db_engine = 'django.db.backends.postgresql_psycopg2'
    db_name = 'test_main'
else:
-     db_engine = 'sqlite3'
+     db_engine = 'django.db.backends.sqlite3'
    db_name = ''

if not settings.configured:
    settings.configure(
+         DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
-         DATABASE_ENGINE = db_engine,
-         DATABASE_NAME = db_name,
        INSTALLED_APPS = [
            'django.contrib.contenttypes',
            'genericm2m',
            'genericm2m.genericm2m_tests',
        ],
    )

- from django.test.simple import run_tests
+ from django.test.utils import get_runner


def runtests(*test_args):
    if not test_args:
        test_args = ['genericm2m_tests']
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
-     failures = run_tests(test_args, verbosity=1, interactive=True)
+     TestRunner = get_runner(settings)
+     test_runner = TestRunner(verbosity=1, interactive=True)
+     failures = test_runner.run_tests(test_args)
    sys.exit(failures)

-
if __name__ == '__main__':
    runtests(*sys.argv[1:])
Allow tests to be run on 1.4
## Code Before:
import sys
from os.path import dirname, abspath

from django.conf import settings

if len(sys.argv) > 1 and 'postgres' in sys.argv:
    sys.argv.remove('postgres')
    db_engine = 'postgresql_psycopg2'
    db_name = 'test_main'
else:
    db_engine = 'sqlite3'
    db_name = ''

if not settings.configured:
    settings.configure(
        DATABASE_ENGINE = db_engine,
        DATABASE_NAME = db_name,
        INSTALLED_APPS = [
            'django.contrib.contenttypes',
            'genericm2m',
            'genericm2m.genericm2m_tests',
        ],
    )

from django.test.simple import run_tests


def runtests(*test_args):
    if not test_args:
        test_args = ['genericm2m_tests']
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    failures = run_tests(test_args, verbosity=1, interactive=True)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
## Instruction:
Allow tests to be run on 1.4
## Code After:
import sys
from os.path import dirname, abspath

import django
from django.conf import settings

if len(sys.argv) > 1 and 'postgres' in sys.argv:
    sys.argv.remove('postgres')
    db_engine = 'django.db.backends.postgresql_psycopg2'
    db_name = 'test_main'
else:
    db_engine = 'django.db.backends.sqlite3'
    db_name = ''

if not settings.configured:
    settings.configure(
        DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
        INSTALLED_APPS = [
            'django.contrib.contenttypes',
            'genericm2m',
            'genericm2m.genericm2m_tests',
        ],
    )

from django.test.utils import get_runner


def runtests(*test_args):
    if not test_args:
        test_args = ['genericm2m_tests']
    parent = dirname(abspath(__file__))
    sys.path.insert(0, parent)
    TestRunner = get_runner(settings)
    test_runner = TestRunner(verbosity=1, interactive=True)
    failures = test_runner.run_tests(test_args)
    sys.exit(failures)


if __name__ == '__main__':
    runtests(*sys.argv[1:])
// ... existing code ...
import django
from django.conf import settings
// ... modified code ...
    sys.argv.remove('postgres')
    db_engine = 'django.db.backends.postgresql_psycopg2'
    db_name = 'test_main'
...
else:
    db_engine = 'django.db.backends.sqlite3'
    db_name = ''
...
    settings.configure(
        DATABASES=dict(default=dict(ENGINE=db_engine, NAME=db_name)),
        INSTALLED_APPS = [
...
from django.test.utils import get_runner
...
    sys.path.insert(0, parent)
    TestRunner = get_runner(settings)
    test_runner = TestRunner(verbosity=1, interactive=True)
    failures = test_runner.run_tests(test_args)
    sys.exit(failures)
// ... rest of the code ...
64750014a91669b6067459a14743a6c5e6257856
umap/__init__.py
umap/__init__.py
from .umap_ import UMAP

# Workaround: https://github.com/numba/numba/issues/3341
import numba

import pkg_resources

__version__ = pkg_resources.get_distribution("umap-learn").version
from .umap_ import UMAP

# Workaround: https://github.com/numba/numba/issues/3341
import numba

import pkg_resources

try:
    __version__ = pkg_resources.get_distribution("umap-learn").version
except pkg_resources.DistributionNotFound:
    __version__ = '0.4-dev'
Patch init import to allow for local dev of UMAP code
Patch init import to allow for local dev of UMAP code
Python
bsd-3-clause
lmcinnes/umap,lmcinnes/umap
from .umap_ import UMAP

# Workaround: https://github.com/numba/numba/issues/3341
import numba

import pkg_resources

+ try:
-     __version__ = pkg_resources.get_distribution("umap-learn").version
+     __version__ = pkg_resources.get_distribution("umap-learn").version
+ except pkg_resources.DistributionNotFound:
+     __version__ = '0.4-dev'
Patch init import to allow for local dev of UMAP code
## Code Before:
from .umap_ import UMAP

# Workaround: https://github.com/numba/numba/issues/3341
import numba

import pkg_resources

__version__ = pkg_resources.get_distribution("umap-learn").version
## Instruction:
Patch init import to allow for local dev of UMAP code
## Code After:
from .umap_ import UMAP

# Workaround: https://github.com/numba/numba/issues/3341
import numba

import pkg_resources

try:
    __version__ = pkg_resources.get_distribution("umap-learn").version
except pkg_resources.DistributionNotFound:
    __version__ = '0.4-dev'
...
try:
    __version__ = pkg_resources.get_distribution("umap-learn").version
except pkg_resources.DistributionNotFound:
    __version__ = '0.4-dev'
...
905690beacad9731bb113bdbeedf0ed2c7df3160
profile_audfprint_match.py
profile_audfprint_match.py
import audfprint
import cProfile
import pstats

argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]

cProfile.run('audfprint.main(argv)', 'fpmstats')

p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
import audfprint
import cProfile
import pstats

argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]

cProfile.run('audfprint.main(argv)', 'fpmstats')

p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
Update profile for local data.
Update profile for local data.
Python
mit
dpwe/audfprint
import audfprint
import cProfile
import pstats

- argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]
+ argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]

cProfile.run('audfprint.main(argv)', 'fpmstats')

p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
Update profile for local data.
## Code Before:
import audfprint
import cProfile
import pstats

argv = ["audfprint", "match", "-d", "tmp.fpdb", "--density", "200", "query.mp3", "query2.mp3"]

cProfile.run('audfprint.main(argv)', 'fpmstats')

p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
## Instruction:
Update profile for local data.
## Code After:
import audfprint
import cProfile
import pstats

argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]

cProfile.run('audfprint.main(argv)', 'fpmstats')

p = pstats.Stats('fpmstats')
p.sort_stats('time')
p.print_stats(10)
// ... existing code ...
argv = ["audfprint", "match", "-d", "fpdbase.pklz", "--density", "200", "query.mp3"]
// ... rest of the code ...
8a8b152566b92cfe0ccbc379b9871da795cd4b5b
keystoneclient/hacking/checks.py
keystoneclient/hacking/checks.py
import re


def check_oslo_namespace_imports(logical_line, blank_before, filename):
    oslo_namespace_imports = re.compile(
        r"(((from)|(import))\s+oslo\."
        "((config)|(serialization)|(utils)|(i18n)))|"
        "(from\s+oslo\s+import\s+((config)|(serialization)|(utils)|(i18n)))")

    if re.match(oslo_namespace_imports, logical_line):
        msg = ("K333: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line)
        yield(0, msg)


def factory(register):
    register(check_oslo_namespace_imports)
import re


def check_oslo_namespace_imports(logical_line, blank_before, filename):
    oslo_namespace_imports = re.compile(
        r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)")

    if re.match(oslo_namespace_imports, logical_line):
        msg = ("K333: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line)
        yield(0, msg)


def factory(register):
    register(check_oslo_namespace_imports)
Change hacking check to verify all oslo imports
Change hacking check to verify all oslo imports

The hacking check was verifying that specific oslo imports weren't using
the oslo-namespaced package. Since all the oslo libraries used by
keystoneclient are now changed to use the new package name the hacking
check can be simplified.

bp drop-namespace-packages

Change-Id: I6466e857c6eda0add6918e9fb14dc9296ed98600
Python
apache-2.0
jamielennox/keystoneauth,citrix-openstack-build/keystoneauth,sileht/keystoneauth
import re


def check_oslo_namespace_imports(logical_line, blank_before, filename):
    oslo_namespace_imports = re.compile(
+         r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)")
-         r"(((from)|(import))\s+oslo\."
-         "((config)|(serialization)|(utils)|(i18n)))|"
-         "(from\s+oslo\s+import\s+((config)|(serialization)|(utils)|(i18n)))")

    if re.match(oslo_namespace_imports, logical_line):
        msg = ("K333: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line)
        yield(0, msg)


def factory(register):
    register(check_oslo_namespace_imports)
Change hacking check to verify all oslo imports
## Code Before:
import re


def check_oslo_namespace_imports(logical_line, blank_before, filename):
    oslo_namespace_imports = re.compile(
        r"(((from)|(import))\s+oslo\."
        "((config)|(serialization)|(utils)|(i18n)))|"
        "(from\s+oslo\s+import\s+((config)|(serialization)|(utils)|(i18n)))")

    if re.match(oslo_namespace_imports, logical_line):
        msg = ("K333: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line)
        yield(0, msg)


def factory(register):
    register(check_oslo_namespace_imports)
## Instruction:
Change hacking check to verify all oslo imports
## Code After:
import re


def check_oslo_namespace_imports(logical_line, blank_before, filename):
    oslo_namespace_imports = re.compile(
        r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)")

    if re.match(oslo_namespace_imports, logical_line):
        msg = ("K333: '%s' must be used instead of '%s'.") % (
            logical_line.replace('oslo.', 'oslo_'),
            logical_line)
        yield(0, msg)


def factory(register):
    register(check_oslo_namespace_imports)
# ... existing code ...
    oslo_namespace_imports = re.compile(
        r"(((from)|(import))\s+oslo\.)|(from\s+oslo\s+import\s+)")
# ... rest of the code ...
069e98f036c77f635a955ea2c48580709089e702
src/conference_scheduler/resources.py
src/conference_scheduler/resources.py
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class Event(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str] = []
    unavailability: List = []


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class BaseEvent(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str]
    unavailability: List


class Event(BaseEvent):

    __slots__ = ()

    def __new__(cls, name, duration, demand, tags=None, unavailability=None):
        if tags is None:
            tags = []
        if unavailability is None:
            unavailability = []
        return super().__new__(
            cls, name, duration, demand, tags, unavailability
        )


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
Set default values for `tags` and `availability`
Set default values for `tags` and `availability`
Python
mit
PyconUK/ConferenceScheduler
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


- class Event(NamedTuple):
+ class BaseEvent(NamedTuple):
    name: str
    duration: int
    demand: int
-     tags: List[str] = []
+     tags: List[str]
-     unavailability: List = []
+     unavailability: List
+
+
+ class Event(BaseEvent):
+
+     __slots__ = ()
+
+     def __new__(cls, name, duration, demand, tags=None, unavailability=None):
+         if tags is None:
+             tags = []
+         if unavailability is None:
+             unavailability = []
+         return super().__new__(
+             cls, name, duration, demand, tags, unavailability
+         )


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
Set default values for `tags` and `availability`
## Code Before:
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class Event(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str] = []
    unavailability: List = []


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
## Instruction:
Set default values for `tags` and `availability`
## Code After:
from typing import NamedTuple, Sequence, Dict, Iterable, List
from datetime import datetime


class Slot(NamedTuple):
    venue: str
    starts_at: datetime
    duration: int
    capacity: int
    session: str


class BaseEvent(NamedTuple):
    name: str
    duration: int
    demand: int
    tags: List[str]
    unavailability: List


class Event(BaseEvent):

    __slots__ = ()

    def __new__(cls, name, duration, demand, tags=None, unavailability=None):
        if tags is None:
            tags = []
        if unavailability is None:
            unavailability = []
        return super().__new__(
            cls, name, duration, demand, tags, unavailability
        )


class ScheduledItem(NamedTuple):
    event: Event
    slot: Slot


class ChangedEventScheduledItem(NamedTuple):
    event: Event
    old_slot: Slot = None
    new_slot: Slot = None


class ChangedSlotScheduledItem(NamedTuple):
    slot: Slot
    old_event: Event = None
    new_event: Event = None


class Shape(NamedTuple):
    """Represents the shape of a 2 dimensional array of events and slots"""
    events: int
    slots: int


class Constraint(NamedTuple):
    label: str
    condition: bool
// ... existing code ...
class BaseEvent(NamedTuple):
    name: str
// ... modified code ...
    demand: int
    tags: List[str]
    unavailability: List


class Event(BaseEvent):

    __slots__ = ()

    def __new__(cls, name, duration, demand, tags=None, unavailability=None):
        if tags is None:
            tags = []
        if unavailability is None:
            unavailability = []
        return super().__new__(
            cls, name, duration, demand, tags, unavailability
        )
// ... rest of the code ...
6d22cc47174139b56fad7d94696b08d9830a7ea4
lettuce_webdriver/tests/__init__.py
lettuce_webdriver/tests/__init__.py
from __future__ import print_function

import os
from contextlib import contextmanager

from selenium import webdriver

from aloe import around, world

here = os.path.dirname(__file__)
html_pages = os.path.join(here, 'html_pages')


@around.each_feature
@contextmanager
def with_browser(feature):
    world.browser = webdriver.Firefox()
    world.browser.get('')
    yield
    world.browser.quit()
    delattr(world, 'browser')


@around.each_step
@contextmanager
def print_source(step):
    try:
        yield
    except:
        print(world.browser.page_source)
        print(world.browser.get_screenshot_as_base64())
        raise
from __future__ import print_function

import os
from contextlib import contextmanager

from selenium import webdriver

from aloe import around, world

here = os.path.dirname(__file__)
html_pages = os.path.join(here, 'html_pages')


@around.each_feature
@contextmanager
def with_browser(feature):
    world.browser = webdriver.Firefox()
    world.browser.get('')
    yield
    world.browser.quit()
    delattr(world, 'browser')


@around.each_step
@contextmanager
def print_source(step):
    try:
        yield
    except:
        try:
            step_container = step.scenario
        except AttributeError:
            step_container = step.background

        print(step_container.feature.name)
        print(step_container.name)
        print(step.sentence)
        print(world.browser.page_source)
        print(world.browser.get_screenshot_as_base64())
        raise
Print scenario/step names on failure
Print scenario/step names on failure
Python
mit
koterpillar/aloe_webdriver,infoxchange/aloe_webdriver,infoxchange/aloe_webdriver,aloetesting/aloe_webdriver,aloetesting/aloe_webdriver,aloetesting/aloe_webdriver,koterpillar/aloe_webdriver
from __future__ import print_function

import os
from contextlib import contextmanager

from selenium import webdriver

from aloe import around, world

here = os.path.dirname(__file__)
html_pages = os.path.join(here, 'html_pages')


@around.each_feature
@contextmanager
def with_browser(feature):
    world.browser = webdriver.Firefox()
    world.browser.get('')
    yield
    world.browser.quit()
    delattr(world, 'browser')


@around.each_step
@contextmanager
def print_source(step):
    try:
        yield
    except:
+         try:
+             step_container = step.scenario
+         except AttributeError:
+             step_container = step.background
+
+         print(step_container.feature.name)
+         print(step_container.name)
+         print(step.sentence)
        print(world.browser.page_source)
        print(world.browser.get_screenshot_as_base64())
        raise
Print scenario/step names on failure
## Code Before:
from __future__ import print_function

import os
from contextlib import contextmanager

from selenium import webdriver

from aloe import around, world

here = os.path.dirname(__file__)
html_pages = os.path.join(here, 'html_pages')


@around.each_feature
@contextmanager
def with_browser(feature):
    world.browser = webdriver.Firefox()
    world.browser.get('')
    yield
    world.browser.quit()
    delattr(world, 'browser')


@around.each_step
@contextmanager
def print_source(step):
    try:
        yield
    except:
        print(world.browser.page_source)
        print(world.browser.get_screenshot_as_base64())
        raise
## Instruction:
Print scenario/step names on failure
## Code After:
from __future__ import print_function

import os
from contextlib import contextmanager

from selenium import webdriver

from aloe import around, world

here = os.path.dirname(__file__)
html_pages = os.path.join(here, 'html_pages')


@around.each_feature
@contextmanager
def with_browser(feature):
    world.browser = webdriver.Firefox()
    world.browser.get('')
    yield
    world.browser.quit()
    delattr(world, 'browser')


@around.each_step
@contextmanager
def print_source(step):
    try:
        yield
    except:
        try:
            step_container = step.scenario
        except AttributeError:
            step_container = step.background

        print(step_container.feature.name)
        print(step_container.name)
        print(step.sentence)
        print(world.browser.page_source)
        print(world.browser.get_screenshot_as_base64())
        raise
# ... existing code ...
    except:
        try:
            step_container = step.scenario
        except AttributeError:
            step_container = step.background

        print(step_container.feature.name)
        print(step_container.name)
        print(step.sentence)
        print(world.browser.page_source)
# ... rest of the code ...
0fae7ce68a531b2c27e03a854fba3319d041ee45
mezzanine/twitter/managers.py
mezzanine/twitter/managers.py
from django.db.models import Manager

from mezzanine.utils.cache import cache_installed


class TweetManager(Manager):
    """
    Manager that handles generating the initial ``Query`` instance
    for a user, list or search term.
    """

    def get_for(self, user_name=None, list_name=None, search_term=None):
        """
        Create a query and run it for the given arg if it doesn't exist,
        and return the tweets for the query.
        """
        if user_name is not None:
            type, value = "user", user_name
        elif list_name is not None:
            type, value = "list", list_name
        elif search_term is not None:
            type, value = "search", search_term
        else:
            return
        from mezzanine.twitter.models import Query
        query, created = Query.objects.get_or_create(type=type, value=value)
        if created or cache_installed():
            query.run()
        elif not query.interested:
            query.interested = True
            query.save()
        return query.tweets.all()
from django.db.models import Manager


class TweetManager(Manager):
    """
    Manager that handles generating the initial ``Query`` instance
    for a user, list or search term.
    """

    def get_for(self, user_name=None, list_name=None, search_term=None):
        """
        Create a query and run it for the given arg if it doesn't exist,
        and return the tweets for the query.
        """
        if user_name is not None:
            type, value = "user", user_name
        elif list_name is not None:
            type, value = "list", list_name
        elif search_term is not None:
            type, value = "search", search_term
        else:
            return
        from mezzanine.twitter.models import Query
        query, created = Query.objects.get_or_create(type=type, value=value)
        if created:
            query.run()
        elif not query.interested:
            query.interested = True
            query.save()
        return query.tweets.all()
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
Python
bsd-2-clause
viaregio/mezzanine,promil23/mezzanine,biomassives/mezzanine,dekomote/mezzanine-modeltranslation-backport,sjuxax/mezzanine,AlexHill/mezzanine,dustinrb/mezzanine,ZeroXn/mezzanine,theclanks/mezzanine,theclanks/mezzanine,SoLoHiC/mezzanine,guibernardino/mezzanine,dekomote/mezzanine-modeltranslation-backport,emile2016/mezzanine,sjdines/mezzanine,promil23/mezzanine,eino-makitalo/mezzanine,dsanders11/mezzanine,PegasusWang/mezzanine,nikolas/mezzanine,ZeroXn/mezzanine,biomassives/mezzanine,saintbird/mezzanine,cccs-web/mezzanine,jerivas/mezzanine,stbarnabas/mezzanine,Kniyl/mezzanine,gbosh/mezzanine,frankier/mezzanine,douglaskastle/mezzanine,jjz/mezzanine,agepoly/mezzanine,molokov/mezzanine,mush42/mezzanine,saintbird/mezzanine,geodesign/mezzanine,scarcry/snm-mezzanine,industrydive/mezzanine,ryneeverett/mezzanine,molokov/mezzanine,Skytorn86/mezzanine,spookylukey/mezzanine,dustinrb/mezzanine,orlenko/plei,fusionbox/mezzanine,emile2016/mezzanine,geodesign/mezzanine,frankier/mezzanine,dsanders11/mezzanine,SoLoHiC/mezzanine,spookylukey/mezzanine,ryneeverett/mezzanine,dovydas/mezzanine,webounty/mezzanine,Kniyl/mezzanine,SoLoHiC/mezzanine,Cicero-Zhao/mezzanine,dustinrb/mezzanine,ryneeverett/mezzanine,adrian-the-git/mezzanine,fusionbox/mezzanine,gbosh/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,stephenmcd/mezzanine,jjz/mezzanine,molokov/mezzanine,readevalprint/mezzanine,eino-makitalo/mezzanine,sjdines/mezzanine,wyzex/mezzanine,orlenko/sfpirg,ZeroXn/mezzanine,damnfine/mezzanine,Cajoline/mezzanine,wbtuomela/mezzanine,stephenmcd/mezzanine,wrwrwr/mezzanine,wyzex/mezzanine,Cajoline/mezzanine,frankchin/mezzanine,promil23/mezzanine,tuxinhang1989/mezzanine,orlenko/sfpirg,saintbird/mezzanine,agepoly/mezzanine,joshcartme/mezzanine,PegasusWang/mezzanine,christianwgd/mezzanine,webounty/mezzanine,theclanks/mezzanine,sjdines/mezzanine,emile2016/mezzanine,readevalprint/mezzanine,orlenko/plei,Kniyl/mezzanine,gradel/mezzanine,jerivas/mezzanine,geodesign/mezzanine,sjuxax/mezzanine,christianwgd/mezzanine,spookylukey/mezzanine,scarcry/snm-mezzanine,cccs-web/mezzanine,jjz/mezzanine,adrian-the-git/mezzanine,wrwrwr/mezzanine,vladir/mezzanine,AlexHill/mezzanine,wbtuomela/mezzanine,dekomote/mezzanine-modeltranslation-backport,guibernardino/mezzanine,jerivas/mezzanine,scarcry/snm-mezzanine,nikolas/mezzanine,viaregio/mezzanine,frankchin/mezzanine,damnfine/mezzanine,gbosh/mezzanine,dsanders11/mezzanine,joshcartme/mezzanine,Skytorn86/mezzanine,frankier/mezzanine,adrian-the-git/mezzanine,frankchin/mezzanine,readevalprint/mezzanine,Cajoline/mezzanine,douglaskastle/mezzanine,gradel/mezzanine,damnfine/mezzanine,PegasusWang/mezzanine,vladir/mezzanine,wyzex/mezzanine,tuxinhang1989/mezzanine,Cicero-Zhao/mezzanine,batpad/mezzanine,orlenko/plei,eino-makitalo/mezzanine,orlenko/sfpirg,dovydas/mezzanine,stbarnabas/mezzanine,Skytorn86/mezzanine,dovydas/mezzanine,christianwgd/mezzanine,mush42/mezzanine,vladir/mezzanine,douglaskastle/mezzanine,viaregio/mezzanine,webounty/mezzanine,gradel/mezzanine,batpad/mezzanine,sjuxax/mezzanine,industrydive/mezzanine,tuxinhang1989/mezzanine,nikolas/mezzanine,industrydive/mezzanine,joshcartme/mezzanine,agepoly/mezzanine,mush42/mezzanine,biomassives/mezzanine
from django.db.models import Manager
-
- from mezzanine.utils.cache import cache_installed


class TweetManager(Manager):
    """
    Manager that handles generating the initial ``Query`` instance
    for a user, list or search term.
    """

    def get_for(self, user_name=None, list_name=None, search_term=None):
        """
        Create a query and run it for the given arg if it doesn't exist,
        and return the tweets for the query.
        """
        if user_name is not None:
            type, value = "user", user_name
        elif list_name is not None:
            type, value = "list", list_name
        elif search_term is not None:
            type, value = "search", search_term
        else:
            return
        from mezzanine.twitter.models import Query
        query, created = Query.objects.get_or_create(type=type, value=value)
-         if created or cache_installed():
+         if created:
            query.run()
        elif not query.interested:
            query.interested = True
            query.save()
        return query.tweets.all()
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
## Code Before:
from django.db.models import Manager

from mezzanine.utils.cache import cache_installed


class TweetManager(Manager):
    """
    Manager that handles generating the initial ``Query`` instance
    for a user, list or search term.
    """

    def get_for(self, user_name=None, list_name=None, search_term=None):
        """
        Create a query and run it for the given arg if it doesn't exist,
        and return the tweets for the query.
        """
        if user_name is not None:
            type, value = "user", user_name
        elif list_name is not None:
            type, value = "list", list_name
        elif search_term is not None:
            type, value = "search", search_term
        else:
            return
        from mezzanine.twitter.models import Query
        query, created = Query.objects.get_or_create(type=type, value=value)
        if created or cache_installed():
            query.run()
        elif not query.interested:
            query.interested = True
            query.save()
        return query.tweets.all()
## Instruction:
Revert cache changes to Twitter queries - since authenticated users bypass the cache, and the Twitter call will generate a lot of queries.
## Code After:
from django.db.models import Manager


class TweetManager(Manager):
    """
    Manager that handles generating the initial ``Query`` instance
    for a user, list or search term.
    """

    def get_for(self, user_name=None, list_name=None, search_term=None):
        """
        Create a query and run it for the given arg if it doesn't exist,
        and return the tweets for the query.
        """
        if user_name is not None:
            type, value = "user", user_name
        elif list_name is not None:
            type, value = "list", list_name
        elif search_term is not None:
            type, value = "search", search_term
        else:
            return
        from mezzanine.twitter.models import Query
        query, created = Query.objects.get_or_create(type=type, value=value)
        if created:
            query.run()
        elif not query.interested:
            query.interested = True
            query.save()
        return query.tweets.all()
# ... existing code ...
from django.db.models import Manager
# ... modified code ...
        query, created = Query.objects.get_or_create(type=type, value=value)
        if created:
            query.run()
# ... rest of the code ...
0ef968528f31da5dd09f016134b4a1ffa6377f84
scripts/slave/chromium/package_source.py
scripts/slave/chromium/package_source.py
"""A tool to package a checkout's source and upload it to Google Storage.""" import sys if '__main__' == __name__: sys.exit(0)
"""A tool to package a checkout's source and upload it to Google Storage.""" import os import sys from common import chromium_utils from slave import slave_utils FILENAME = 'chromium-src.tgz' GSBASE = 'chromium-browser-csindex' def main(argv): if not os.path.exists('src'): raise Exception('ERROR: no src directory to package, exiting') chromium_utils.RunCommand(['rm', '-f', FILENAME]) if os.path.exists(FILENAME): raise Exception('ERROR: %s cannot be removed, exiting' % FILENAME) if chromium_utils.RunCommand(['tar', 'czf', FILENAME, 'src/']) != 0: raise Exception('ERROR: failed to create %s, exiting' % FILENAME) status = slave_utils.GSUtilCopyFile(FILENAME, GSBASE) if status != 0: raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % ( status, FILENAME, GSBASE)) return 0 if '__main__' == __name__: sys.exit(main(None))
Create source snapshot and upload to GS.
Create source snapshot and upload to GS.

BUG=79198
Review URL: http://codereview.chromium.org/7129020

git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@88372 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
"""A tool to package a checkout's source and upload it to Google Storage.""" + import os import sys + + from common import chromium_utils + from slave import slave_utils + + + FILENAME = 'chromium-src.tgz' + GSBASE = 'chromium-browser-csindex' + + + def main(argv): + if not os.path.exists('src'): + raise Exception('ERROR: no src directory to package, exiting') + + chromium_utils.RunCommand(['rm', '-f', FILENAME]) + if os.path.exists(FILENAME): + raise Exception('ERROR: %s cannot be removed, exiting' % FILENAME) + + if chromium_utils.RunCommand(['tar', 'czf', FILENAME, 'src/']) != 0: + raise Exception('ERROR: failed to create %s, exiting' % FILENAME) + + status = slave_utils.GSUtilCopyFile(FILENAME, GSBASE) + if status != 0: + raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % ( + status, FILENAME, GSBASE)) + + return 0 if '__main__' == __name__: - sys.exit(0) + sys.exit(main(None))
Create source snapshot and upload to GS.
## Code Before:
"""A tool to package a checkout's source and upload it to Google Storage."""

import sys


if '__main__' == __name__:
  sys.exit(0)
## Instruction:
Create source snapshot and upload to GS.
## Code After:
"""A tool to package a checkout's source and upload it to Google Storage."""

import os
import sys

from common import chromium_utils
from slave import slave_utils


FILENAME = 'chromium-src.tgz'
GSBASE = 'chromium-browser-csindex'


def main(argv):
  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  chromium_utils.RunCommand(['rm', '-f', FILENAME])
  if os.path.exists(FILENAME):
    raise Exception('ERROR: %s cannot be removed, exiting' % FILENAME)

  if chromium_utils.RunCommand(['tar', 'czf', FILENAME, 'src/']) != 0:
    raise Exception('ERROR: failed to create %s, exiting' % FILENAME)

  status = slave_utils.GSUtilCopyFile(FILENAME, GSBASE)
  if status != 0:
    raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % (
        status, FILENAME, GSBASE))

  return 0


if '__main__' == __name__:
  sys.exit(main(None))
# ... existing code ...
import os
import sys

from common import chromium_utils
from slave import slave_utils


FILENAME = 'chromium-src.tgz'
GSBASE = 'chromium-browser-csindex'


def main(argv):
  if not os.path.exists('src'):
    raise Exception('ERROR: no src directory to package, exiting')

  chromium_utils.RunCommand(['rm', '-f', FILENAME])
  if os.path.exists(FILENAME):
    raise Exception('ERROR: %s cannot be removed, exiting' % FILENAME)

  if chromium_utils.RunCommand(['tar', 'czf', FILENAME, 'src/']) != 0:
    raise Exception('ERROR: failed to create %s, exiting' % FILENAME)

  status = slave_utils.GSUtilCopyFile(FILENAME, GSBASE)
  if status != 0:
    raise Exception('ERROR: GSUtilCopyFile error %d. "%s" -> "%s"' % (
        status, FILENAME, GSBASE))

  return 0
# ... modified code ...
if '__main__' == __name__:
  sys.exit(main(None))
# ... rest of the code ...
be5541bd16a84c37b973cf77cb0f4d5c5e83e39a
spacy/tests/regression/test_issue768.py
spacy/tests/regression/test_issue768.py
from __future__ import unicode_literals

from ...language import Language
from ...attrs import LANG
from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS
from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA

import pytest


@pytest.fixture
def fr_tokenizer_w_infix():
    SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA)

    # create new Language subclass to add to default infixes
    class French(Language):
        lang = 'fr'

        class Defaults(Language.Defaults):
            lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
            lex_attr_getters[LANG] = lambda text: 'fr'
            tokenizer_exceptions = get_tokenizer_exceptions()
            stop_words = STOP_WORDS
            infixes = TOKENIZER_INFIXES + [SPLIT_INFIX]

    return French.Defaults.create_tokenizer()


@pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]),
                                                  ("j'ai", ["j'", "ai"])])
def test_issue768(fr_tokenizer_w_infix, text, expected_tokens):
    """Allow zero-width 'infix' token during the tokenization process."""
    tokens = fr_tokenizer_w_infix(text)
    assert len(tokens) == 2
    assert [t.text for t in tokens] == expected_tokens
from __future__ import unicode_literals from ...language import Language from ...attrs import LANG from ...fr.stop_words import STOP_WORDS from ...fr.tokenizer_exceptions import TOKENIZER_EXCEPTIONS from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA from ...util import update_exc import pytest @pytest.fixture def fr_tokenizer_w_infix(): SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA) # create new Language subclass to add to default infixes class French(Language): lang = 'fr' class Defaults(Language.Defaults): lex_attr_getters = dict(Language.Defaults.lex_attr_getters) lex_attr_getters[LANG] = lambda text: 'fr' tokenizer_exceptions = update_exc(TOKENIZER_EXCEPTIONS) stop_words = STOP_WORDS infixes = TOKENIZER_INFIXES + [SPLIT_INFIX] return French.Defaults.create_tokenizer() @pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]) def test_issue768(fr_tokenizer_w_infix, text, expected_tokens): """Allow zero-width 'infix' token during the tokenization process.""" tokens = fr_tokenizer_w_infix(text) assert len(tokens) == 2 assert [t.text for t in tokens] == expected_tokens
Fix import and tokenizer exceptions
Fix import and tokenizer exceptions
Python
mit
explosion/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy
from __future__ import unicode_literals from ...language import Language from ...attrs import LANG - from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS + from ...fr.stop_words import STOP_WORDS + from ...fr.tokenizer_exceptions import TOKENIZER_EXCEPTIONS from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA + from ...util import update_exc import pytest @pytest.fixture def fr_tokenizer_w_infix(): SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA) # create new Language subclass to add to default infixes class French(Language): lang = 'fr' class Defaults(Language.Defaults): lex_attr_getters = dict(Language.Defaults.lex_attr_getters) lex_attr_getters[LANG] = lambda text: 'fr' - tokenizer_exceptions = get_tokenizer_exceptions() + tokenizer_exceptions = update_exc(TOKENIZER_EXCEPTIONS) stop_words = STOP_WORDS infixes = TOKENIZER_INFIXES + [SPLIT_INFIX] return French.Defaults.create_tokenizer() @pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]) def test_issue768(fr_tokenizer_w_infix, text, expected_tokens): """Allow zero-width 'infix' token during the tokenization process.""" tokens = fr_tokenizer_w_infix(text) assert len(tokens) == 2 assert [t.text for t in tokens] == expected_tokens
Fix import and tokenizer exceptions
## Code Before: from __future__ import unicode_literals from ...language import Language from ...attrs import LANG from ...fr.language_data import get_tokenizer_exceptions, STOP_WORDS from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA import pytest @pytest.fixture def fr_tokenizer_w_infix(): SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA) # create new Language subclass to add to default infixes class French(Language): lang = 'fr' class Defaults(Language.Defaults): lex_attr_getters = dict(Language.Defaults.lex_attr_getters) lex_attr_getters[LANG] = lambda text: 'fr' tokenizer_exceptions = get_tokenizer_exceptions() stop_words = STOP_WORDS infixes = TOKENIZER_INFIXES + [SPLIT_INFIX] return French.Defaults.create_tokenizer() @pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]) def test_issue768(fr_tokenizer_w_infix, text, expected_tokens): """Allow zero-width 'infix' token during the tokenization process.""" tokens = fr_tokenizer_w_infix(text) assert len(tokens) == 2 assert [t.text for t in tokens] == expected_tokens ## Instruction: Fix import and tokenizer exceptions ## Code After: from __future__ import unicode_literals from ...language import Language from ...attrs import LANG from ...fr.stop_words import STOP_WORDS from ...fr.tokenizer_exceptions import TOKENIZER_EXCEPTIONS from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA from ...util import update_exc import pytest @pytest.fixture def fr_tokenizer_w_infix(): SPLIT_INFIX = r'(?<=[{a}]\')(?=[{a}])'.format(a=ALPHA) # create new Language subclass to add to default infixes class French(Language): lang = 'fr' class Defaults(Language.Defaults): lex_attr_getters = dict(Language.Defaults.lex_attr_getters) lex_attr_getters[LANG] = lambda text: 'fr' tokenizer_exceptions = update_exc(TOKENIZER_EXCEPTIONS) stop_words = STOP_WORDS infixes = TOKENIZER_INFIXES + [SPLIT_INFIX] return French.Defaults.create_tokenizer() @pytest.mark.parametrize('text,expected_tokens', [("l'avion", ["l'", "avion"]), ("j'ai", ["j'", "ai"])]) def test_issue768(fr_tokenizer_w_infix, text, expected_tokens): """Allow zero-width 'infix' token during the tokenization process.""" tokens = fr_tokenizer_w_infix(text) assert len(tokens) == 2 assert [t.text for t in tokens] == expected_tokens
... from ...attrs import LANG from ...fr.stop_words import STOP_WORDS from ...fr.tokenizer_exceptions import TOKENIZER_EXCEPTIONS from ...language_data.punctuation import TOKENIZER_INFIXES, ALPHA from ...util import update_exc ... lex_attr_getters[LANG] = lambda text: 'fr' tokenizer_exceptions = update_exc(TOKENIZER_EXCEPTIONS) stop_words = STOP_WORDS ...
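The core of this fix is the zero-width SPLIT_INFIX pattern. It can be demonstrated with the standard `re` module alone; the sketch below assumes a simplified ASCII `ALPHA` class in place of spaCy's full Unicode letter class.

```python
# Standalone sketch of the SPLIT_INFIX pattern, using plain `re` only.
# ALPHA here is a simplified ASCII stand-in for spaCy's character class.
import re

ALPHA = 'a-zA-Z'
SPLIT_INFIX = r"(?<=[{a}]\')(?=[{a}])".format(a=ALPHA)

# The lookbehind/lookahead pair matches an empty string between an
# apostrophe and a following letter, so the split consumes no characters.
print(re.split(SPLIT_INFIX, "l'avion"))  # ["l'", 'avion']
print(re.split(SPLIT_INFIX, "j'ai"))     # ["j'", 'ai']
```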
13f9a48166aed2f6d09e1a27c60568d2318ceee2
src/ocspdash/custom_columns.py
src/ocspdash/custom_columns.py
"""Implements custom SQLAlchemy TypeDecorators.""" import uuid import sqlalchemy.dialects.postgresql from sqlalchemy.types import BINARY, TypeDecorator __all__ = [ 'UUID', ] class UUID(TypeDecorator): """Platform-independent UUID type. Uses Postgresql's UUID type, otherwise uses BINARY(16). Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type """ impl = BINARY def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID()) return dialect.type_descriptor(BINARY) def process_bind_param(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return str(value) if isinstance(value, uuid.UUID): # hex string return value.bytes value_uuid = uuid.UUID(value) return value_uuid.bytes def process_result_value(self, value, dialect): if value is None: return return uuid.UUID(bytes=value)
"""Implements custom SQLAlchemy TypeDecorators.""" import uuid import sqlalchemy.dialects.postgresql from sqlalchemy.types import BINARY, TypeDecorator __all__ = [ 'UUID', ] class UUID(TypeDecorator): """Platform-independent UUID type. Uses Postgresql's UUID type, otherwise uses BINARY(16). Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type """ impl = BINARY def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID()) return dialect.type_descriptor(BINARY) def process_bind_param(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return str(value) if isinstance(value, uuid.UUID): # raw UUID bytes return value.bytes value_uuid = uuid.UUID(value) return value_uuid.bytes def process_result_value(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return uuid.UUID(value) return uuid.UUID(bytes=value)
Change the custom UUID column to work right
Change the custom UUID column to work right
Python
mit
scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash
"""Implements custom SQLAlchemy TypeDecorators.""" import uuid import sqlalchemy.dialects.postgresql from sqlalchemy.types import BINARY, TypeDecorator __all__ = [ 'UUID', ] class UUID(TypeDecorator): """Platform-independent UUID type. Uses Postgresql's UUID type, otherwise uses BINARY(16). Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type """ impl = BINARY def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID()) return dialect.type_descriptor(BINARY) def process_bind_param(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return str(value) if isinstance(value, uuid.UUID): - # hex string + # raw UUID bytes return value.bytes value_uuid = uuid.UUID(value) return value_uuid.bytes def process_result_value(self, value, dialect): if value is None: return + if dialect.name == 'postgresql': + return uuid.UUID(value) + return uuid.UUID(bytes=value)
Change the custom UUID column to work right
## Code Before: """Implements custom SQLAlchemy TypeDecorators.""" import uuid import sqlalchemy.dialects.postgresql from sqlalchemy.types import BINARY, TypeDecorator __all__ = [ 'UUID', ] class UUID(TypeDecorator): """Platform-independent UUID type. Uses Postgresql's UUID type, otherwise uses BINARY(16). Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type """ impl = BINARY def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID()) return dialect.type_descriptor(BINARY) def process_bind_param(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return str(value) if isinstance(value, uuid.UUID): # hex string return value.bytes value_uuid = uuid.UUID(value) return value_uuid.bytes def process_result_value(self, value, dialect): if value is None: return return uuid.UUID(bytes=value) ## Instruction: Change the custom UUID column to work right ## Code After: """Implements custom SQLAlchemy TypeDecorators.""" import uuid import sqlalchemy.dialects.postgresql from sqlalchemy.types import BINARY, TypeDecorator __all__ = [ 'UUID', ] class UUID(TypeDecorator): """Platform-independent UUID type. Uses Postgresql's UUID type, otherwise uses BINARY(16). Based on http://docs.sqlalchemy.org/en/rel_0_9/core/custom_types.html?highlight=guid#backend-agnostic-guid-type """ impl = BINARY def load_dialect_impl(self, dialect): if dialect.name == 'postgresql': return dialect.type_descriptor(sqlalchemy.dialects.postgresql.UUID()) return dialect.type_descriptor(BINARY) def process_bind_param(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return str(value) if isinstance(value, uuid.UUID): # raw UUID bytes return value.bytes value_uuid = uuid.UUID(value) return value_uuid.bytes def process_result_value(self, value, dialect): if value is None: return if dialect.name == 'postgresql': return uuid.UUID(value) return uuid.UUID(bytes=value)
# ... existing code ... if isinstance(value, uuid.UUID): # raw UUID bytes return value.bytes # ... modified code ... if dialect.name == 'postgresql': return uuid.UUID(value) return uuid.UUID(bytes=value) # ... rest of the code ...
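A minimal round-trip sketch for the corrected type, assuming the `UUID` class above is in scope, SQLAlchemy 1.4+, and an in-memory SQLite database (so the non-PostgreSQL `BINARY` branch is exercised); the `Item` model is hypothetical.

```python
# Minimal round-trip sketch for the UUID TypeDecorator defined above.
import uuid
from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Item(Base):
    __tablename__ = 'items'
    id = Column(Integer, primary_key=True)
    token = Column(UUID)  # the platform-independent type defined above

engine = create_engine('sqlite://')  # non-PostgreSQL: the BINARY path runs
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Item(token=uuid.uuid4()))
    session.commit()
    # process_result_value turns the stored bytes back into uuid.UUID
    print(type(session.query(Item).first().token))  # <class 'uuid.UUID'>
```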
cc00cc1c2539eb7dbeed2656e1929c8c53c4dd98
pyverdict/pyverdict/datatype_converters/impala_converter.py
pyverdict/pyverdict/datatype_converters/impala_converter.py
from .converter_base import DatatypeConverterBase import dateutil def _str_to_datetime(java_obj, idx): return dateutil.parser.parse(java_obj.getString(idx)) _typename_to_converter_fxn = {'timestamp': _str_to_datetime} class ImpalaConverter(DatatypeConverterBase): @staticmethod def read_value(result_set, index, col_typename): if col_typename in _typename_to_converter_fxn: if result_set.getString(index) is None: return None return _typename_to_converter_fxn[col_typename](result_set, index) else: return result_set.getValue(index)
from .converter_base import DatatypeConverterBase import dateutil def _str_to_datetime(java_obj, idx): return dateutil.parser.parse(java_obj.getString(idx)) _typename_to_converter_fxn = {'timestamp': _str_to_datetime} class ImpalaConverter(DatatypeConverterBase): ''' Type conversion rule: BIGINT => int, BOOLEAN => bool, CHAR => str, DECIMAL => decimal.Decimal, DOUBLE => float, FLOAT => float, REAL => float, SMALLINT => int, STRING => str, TIMESTAMP => datetime.datetime, TINYINT => int, VARCHAR => str ''' @staticmethod def read_value(result_set, index, col_typename): if col_typename in _typename_to_converter_fxn: if result_set.getString(index) is None: return None return _typename_to_converter_fxn[col_typename](result_set, index) else: return result_set.getValue(index)
Add type conversion rule comment
Add type conversion rule comment
Python
apache-2.0
mozafari/verdict,mozafari/verdict,mozafari/verdict,mozafari/verdict,mozafari/verdict
from .converter_base import DatatypeConverterBase import dateutil def _str_to_datetime(java_obj, idx): return dateutil.parser.parse(java_obj.getString(idx)) _typename_to_converter_fxn = {'timestamp': _str_to_datetime} class ImpalaConverter(DatatypeConverterBase): + ''' + Type conversion rule: + + BIGINT => int, + BOOLEAN => bool, + CHAR => str, + DECIMAL => decimal.Decimal, + DOUBLE => float, + FLOAT => float, + REAL => float, + SMALLINT => int, + STRING => str, + TIMESTAMP => datetime.datetime, + TINYINT => int, + VARCHAR => str + + ''' + @staticmethod def read_value(result_set, index, col_typename): if col_typename in _typename_to_converter_fxn: if result_set.getString(index) is None: return None return _typename_to_converter_fxn[col_typename](result_set, index) else: return result_set.getValue(index)
Add type conversion rule comment
## Code Before: from .converter_base import DatatypeConverterBase import dateutil def _str_to_datetime(java_obj, idx): return dateutil.parser.parse(java_obj.getString(idx)) _typename_to_converter_fxn = {'timestamp': _str_to_datetime} class ImpalaConverter(DatatypeConverterBase): @staticmethod def read_value(result_set, index, col_typename): if col_typename in _typename_to_converter_fxn: if result_set.getString(index) is None: return None return _typename_to_converter_fxn[col_typename](result_set, index) else: return result_set.getValue(index) ## Instruction: Add type conversion rule comment ## Code After: from .converter_base import DatatypeConverterBase import dateutil def _str_to_datetime(java_obj, idx): return dateutil.parser.parse(java_obj.getString(idx)) _typename_to_converter_fxn = {'timestamp': _str_to_datetime} class ImpalaConverter(DatatypeConverterBase): ''' Type conversion rule: BIGINT => int, BOOLEAN => bool, CHAR => str, DECIMAL => decimal.Decimal, DOUBLE => float, FLOAT => float, REAL => float, SMALLINT => int, STRING => str, TIMESTAMP => datetime.datetime, TINYINT => int, VARCHAR => str ''' @staticmethod def read_value(result_set, index, col_typename): if col_typename in _typename_to_converter_fxn: if result_set.getString(index) is None: return None return _typename_to_converter_fxn[col_typename](result_set, index) else: return result_set.getValue(index)
// ... existing code ... class ImpalaConverter(DatatypeConverterBase): ''' Type conversion rule: BIGINT => int, BOOLEAN => bool, CHAR => str, DECIMAL => decimal.Decimal, DOUBLE => float, FLOAT => float, REAL => float, SMALLINT => int, STRING => str, TIMESTAMP => datetime.datetime, TINYINT => int, VARCHAR => str ''' @staticmethod // ... rest of the code ...
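The dispatch through `_typename_to_converter_fxn` can be exercised without a live JDBC connection; `FakeResultSet` below is invented for illustration and assumes the `ImpalaConverter` class above is in scope. Note that `import dateutil` alone does not load the `parser` submodule, so the sketch imports it explicitly.

```python
# Illustrative only: a stand-in for the JDBC result set, showing how
# read_value dispatches on the column type name.
import dateutil.parser  # `import dateutil` by itself leaves this unloaded

class FakeResultSet:
    def __init__(self, values):
        self._values = values
    def getString(self, idx):
        return str(self._values[idx])
    def getValue(self, idx):
        return self._values[idx]

rs = FakeResultSet({0: '2019-01-01 12:00:00', 1: 42})
print(ImpalaConverter.read_value(rs, 0, 'timestamp'))  # 2019-01-01 12:00:00
print(ImpalaConverter.read_value(rs, 1, 'bigint'))     # 42
```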
56a8b900570200e63ee460dd7e2962cba2450b16
preparation/tools/build_assets.py
preparation/tools/build_assets.py
from copy import copy import argparse from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage, ExplanationStorage def generate_asset(resource, out_storage: ExplanationStorage): out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_trunk(trunk: str): resource = resource_by_name(trunk + 'Resource')() with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) generate_asset(resource, out_storage) print("Finished {} generation".format(trunk)) def make_argparser(): parser = argparse.ArgumentParser(description='Rebuild some asset') names = [name.replace('Resource', '') for name in names_registered()] parser.add_argument('resources', metavar='RESOURCE', nargs='+', choices=names + ['all'], help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names))) return parser def main(args=None): if not isinstance(args, argparse.Namespace): parser = make_argparser() args = parser.parse_args(args) assert all not in args.resources or len(args.resources) == 1 for name in args.resources: rebuild_trunk(name) if __name__ == '__main__': main()
from copy import copy import argparse from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage, ExplanationStorage def generate_asset(resource, out_storage: ExplanationStorage): out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_trunk(trunk: str): resource = resource_by_name(trunk + 'Resource')() with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) generate_asset(resource, out_storage) print("Finished {} generation".format(trunk)) def make_argparser(): parser = argparse.ArgumentParser(description='Rebuild some asset') names = [name.replace('Resource', '') for name in names_registered()] parser.add_argument('resources', metavar='RESOURCE', nargs='+', choices=names + ['all'], help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names))) return parser def main(args=None): if not isinstance(args, argparse.Namespace): parser = make_argparser() args = parser.parse_args(args) assert 'all' not in args.resources or len(args.resources) == 1 if 'all' in args.resources: args.resources = [name.replace('Resource', '') for name in names_registered()] for name in args.resources: rebuild_trunk(name) if __name__ == '__main__': main()
Fix bug with 'all' argument
Fix bug with 'all' argument
Python
mit
hatbot-team/hatbot_resources
from copy import copy import argparse from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage, ExplanationStorage def generate_asset(resource, out_storage: ExplanationStorage): out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_trunk(trunk: str): resource = resource_by_name(trunk + 'Resource')() with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) generate_asset(resource, out_storage) print("Finished {} generation".format(trunk)) def make_argparser(): parser = argparse.ArgumentParser(description='Rebuild some asset') names = [name.replace('Resource', '') for name in names_registered()] parser.add_argument('resources', metavar='RESOURCE', nargs='+', choices=names + ['all'], help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names))) return parser def main(args=None): if not isinstance(args, argparse.Namespace): parser = make_argparser() args = parser.parse_args(args) - assert all not in args.resources or len(args.resources) == 1 + assert 'all' not in args.resources or len(args.resources) == 1 + if 'all' in args.resources: + args.resources = [name.replace('Resource', '') for name in names_registered()] for name in args.resources: rebuild_trunk(name) if __name__ == '__main__': main()
Fix bug with 'all' argument
## Code Before: from copy import copy import argparse from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage, ExplanationStorage def generate_asset(resource, out_storage: ExplanationStorage): out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_trunk(trunk: str): resource = resource_by_name(trunk + 'Resource')() with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) generate_asset(resource, out_storage) print("Finished {} generation".format(trunk)) def make_argparser(): parser = argparse.ArgumentParser(description='Rebuild some asset') names = [name.replace('Resource', '') for name in names_registered()] parser.add_argument('resources', metavar='RESOURCE', nargs='+', choices=names + ['all'], help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names))) return parser def main(args=None): if not isinstance(args, argparse.Namespace): parser = make_argparser() args = parser.parse_args(args) assert all not in args.resources or len(args.resources) == 1 for name in args.resources: rebuild_trunk(name) if __name__ == '__main__': main() ## Instruction: Fix bug with 'all' argument ## Code After: from copy import copy import argparse from preparation.resources.Resource import names_registered, resource_by_name from hb_res.storage import get_storage, ExplanationStorage def generate_asset(resource, out_storage: ExplanationStorage): out_storage.clear() for explanation in resource: r = copy(explanation) for functor in resource.modifiers: if r is None: break r = functor(r) if r is not None: out_storage.add_entry(r) def rebuild_trunk(trunk: str): resource = resource_by_name(trunk + 'Resource')() with get_storage(trunk) as out_storage: print("Starting {} generation".format(trunk)) generate_asset(resource, out_storage) print("Finished {} generation".format(trunk)) def make_argparser(): parser = argparse.ArgumentParser(description='Rebuild some asset') names = [name.replace('Resource', '') for name in names_registered()] parser.add_argument('resources', metavar='RESOURCE', nargs='+', choices=names + ['all'], help='One of registered resources ({}) or just \'all\'.'.format(', '.join(names))) return parser def main(args=None): if not isinstance(args, argparse.Namespace): parser = make_argparser() args = parser.parse_args(args) assert 'all' not in args.resources or len(args.resources) == 1 if 'all' in args.resources: args.resources = [name.replace('Resource', '') for name in names_registered()] for name in args.resources: rebuild_trunk(name) if __name__ == '__main__': main()
# ... existing code ... args = parser.parse_args(args) assert 'all' not in args.resources or len(args.resources) == 1 if 'all' in args.resources: args.resources = [name.replace('Resource', '') for name in names_registered()] for name in args.resources: # ... rest of the code ...
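The bug is easy to reproduce in isolation: without quotes, `all` names the builtin function, so the membership test compares a function object against the strings in the list and the assertion can never fire.

```python
# Minimal reproduction of the unquoted-`all` bug fixed above.
resources = ['all', 'SomeResource']

print(all not in resources)    # True  -- the builtin function is not in the list
print('all' not in resources)  # False -- the string actually is
```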
c220c0a474a660c4c1167d42fdd0d48599b1b593
tests/test_pathutils.py
tests/test_pathutils.py
from os.path import join import sublime import sys from unittest import TestCase version = sublime.version() try: from libsass import pathutils except ImportError: from sublime_libsass.libsass import pathutils class TestPathutils(TestCase): def test_subpaths(self): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) def test_grep_r(self): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
from os.path import join, realpath import os import sublime import sys from unittest import TestCase from functools import wraps def subl_patch(pkg, obj=None): def subl_deco(fn): @wraps(fn) def wrap(*args): nonlocal pkg o = [] if obj != None: o += [obj] pkg = pkg + '.' + obj try: mock = __import__(pkg, globals(), locals(), o, 0) except ImportError: pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg mock = __import__(pkg, globals(), locals(), o, 0) args += (mock,) fn(*args) return wrap return subl_deco class TestPathutils(TestCase): @subl_patch('libsass', 'pathutils') def test_subpaths(self, pathutils): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) @subl_patch('libsass', 'pathutils') def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Make custom patch in package to test
Make custom patch in package to test
Python
mit
blitzrk/sublime_libsass,blitzrk/sublime_libsass
- from os.path import join + from os.path import join, realpath + import os import sublime import sys from unittest import TestCase + from functools import wraps - version = sublime.version() - try: - from libsass import pathutils + def subl_patch(pkg, obj=None): + def subl_deco(fn): + @wraps(fn) + def wrap(*args): + nonlocal pkg + o = [] + if obj != None: + o += [obj] + pkg = pkg + '.' + obj + try: + mock = __import__(pkg, globals(), locals(), o, 0) - except ImportError: + except ImportError: - from sublime_libsass.libsass import pathutils + pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg + mock = __import__(pkg, globals(), locals(), o, 0) + args += (mock,) + fn(*args) + return wrap + return subl_deco class TestPathutils(TestCase): + @subl_patch('libsass', 'pathutils') - def test_subpaths(self): + def test_subpaths(self, pathutils): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) + @subl_patch('libsass', 'pathutils') - def test_grep_r(self): + def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Make custom patch in package to test
## Code Before: from os.path import join import sublime import sys from unittest import TestCase version = sublime.version() try: from libsass import pathutils except ImportError: from sublime_libsass.libsass import pathutils class TestPathutils(TestCase): def test_subpaths(self): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) def test_grep_r(self): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), []) ## Instruction: Make custom patch in package to test ## Code After: from os.path import join, realpath import os import sublime import sys from unittest import TestCase from functools import wraps def subl_patch(pkg, obj=None): def subl_deco(fn): @wraps(fn) def wrap(*args): nonlocal pkg o = [] if obj != None: o += [obj] pkg = pkg + '.' + obj try: mock = __import__(pkg, globals(), locals(), o, 0) except ImportError: pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg mock = __import__(pkg, globals(), locals(), o, 0) args += (mock,) fn(*args) return wrap return subl_deco class TestPathutils(TestCase): @subl_patch('libsass', 'pathutils') def test_subpaths(self, pathutils): path = join('/foo','bar','baz') exprmt = pathutils.subpaths(path) expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ] self.assertEqual(exprmt, expect) @subl_patch('libsass', 'pathutils') def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp']) self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), []) self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
# ... existing code ... from os.path import join, realpath import os import sublime # ... modified code ... from unittest import TestCase from functools import wraps def subl_patch(pkg, obj=None): def subl_deco(fn): @wraps(fn) def wrap(*args): nonlocal pkg o = [] if obj != None: o += [obj] pkg = pkg + '.' + obj try: mock = __import__(pkg, globals(), locals(), o, 0) except ImportError: pkg = realpath(__file__).split(os.sep)[-3] + '.' + pkg mock = __import__(pkg, globals(), locals(), o, 0) args += (mock,) fn(*args) return wrap return subl_deco ... class TestPathutils(TestCase): @subl_patch('libsass', 'pathutils') def test_subpaths(self, pathutils): path = join('/foo','bar','baz') ... @subl_patch('libsass', 'pathutils') def test_grep_r(self, pathutils): pathutils.os.walk = lambda x: [('/tmp','',['file.scss'])] # ... rest of the code ...
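The decorator leans on a detail of `__import__` worth spelling out: with a non-empty `fromlist`, it returns the named submodule itself rather than the top-level package, which is what makes the fallback re-import work. A quick check against the standard library:

```python
# With a non-empty fromlist, __import__ returns the submodule itself.
mod = __import__('os.path', globals(), locals(), ['join'], 0)
print(mod.__name__)        # posixpath (or ntpath) -- i.e. os.path, not os
print(mod.join('a', 'b'))  # a/b

# Without a fromlist, you get the top-level package instead.
top = __import__('os.path')
print(top.__name__)        # os
```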
900ab180a8e255cc46e0583d251c5a71fc27f5d6
src/waldur_mastermind/marketplace_rancher/processors.py
src/waldur_mastermind/marketplace_rancher/processors.py
from waldur_mastermind.marketplace import processors from waldur_rancher import views as rancher_views class RancherCreateProcessor(processors.BaseCreateResourceProcessor): viewset = rancher_views.ClusterViewSet fields = ( 'name', 'description', 'nodes', 'tenant_settings', 'ssh_public_key', ) class RancherDeleteProcessor(processors.DeleteResourceProcessor): viewset = rancher_views.ClusterViewSet
from waldur_mastermind.marketplace import processors from waldur_rancher import views as rancher_views class RancherCreateProcessor(processors.BaseCreateResourceProcessor): viewset = rancher_views.ClusterViewSet fields = ( 'name', 'description', 'nodes', 'tenant_settings', 'ssh_public_key', 'install_longhorn', ) class RancherDeleteProcessor(processors.DeleteResourceProcessor): viewset = rancher_views.ClusterViewSet
Add new field to Processor
Add new field to Processor
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
from waldur_mastermind.marketplace import processors from waldur_rancher import views as rancher_views class RancherCreateProcessor(processors.BaseCreateResourceProcessor): viewset = rancher_views.ClusterViewSet fields = ( 'name', 'description', 'nodes', 'tenant_settings', 'ssh_public_key', + 'install_longhorn', ) class RancherDeleteProcessor(processors.DeleteResourceProcessor): viewset = rancher_views.ClusterViewSet
Add new field to Processor
## Code Before: from waldur_mastermind.marketplace import processors from waldur_rancher import views as rancher_views class RancherCreateProcessor(processors.BaseCreateResourceProcessor): viewset = rancher_views.ClusterViewSet fields = ( 'name', 'description', 'nodes', 'tenant_settings', 'ssh_public_key', ) class RancherDeleteProcessor(processors.DeleteResourceProcessor): viewset = rancher_views.ClusterViewSet ## Instruction: Add new field to Processor ## Code After: from waldur_mastermind.marketplace import processors from waldur_rancher import views as rancher_views class RancherCreateProcessor(processors.BaseCreateResourceProcessor): viewset = rancher_views.ClusterViewSet fields = ( 'name', 'description', 'nodes', 'tenant_settings', 'ssh_public_key', 'install_longhorn', ) class RancherDeleteProcessor(processors.DeleteResourceProcessor): viewset = rancher_views.ClusterViewSet
... 'ssh_public_key', 'install_longhorn', ) ...
f1af7dad41992b53e90a5f8dd20e1635f11a7ce1
pstats_print2list/__init__.py
pstats_print2list/__init__.py
__author__ = 'Vauxoo' __email__ = '[email protected]' __version__ = '0.1.0'
from pstats_print2list import print_stats __author__ = 'Vauxoo' __email__ = '[email protected]' __version__ = '0.1.0'
Add print_stats to init file
[REF] pstats_print2list: Add print_stats to init file
Python
isc
Vauxoo/pstats-print2list
+ + from pstats_print2list import print_stats __author__ = 'Vauxoo' __email__ = '[email protected]' __version__ = '0.1.0'
Add print_stats to init file
## Code Before: __author__ = 'Vauxoo' __email__ = '[email protected]' __version__ = '0.1.0' ## Instruction: Add print_stats to init file ## Code After: from pstats_print2list import print_stats __author__ = 'Vauxoo' __email__ = '[email protected]' __version__ = '0.1.0'
... from pstats_print2list import print_stats ...
57bb37d7579620005a49613ff90f0a2eec55a77e
backend/offers_web.py
backend/offers_web.py
import falcon import json import rethinkdb as r MAX_OFFERS = 100 class OfferListResource: def __init__(self): self._db = r.connect('localhost', 28015) def on_get(self, req, resp): """Returns all offers available""" try: limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1))) except ValueError as e: raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page") if page < 1: raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page") elif limit < 1: raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page") else: cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) count = r.db('voyageavecmoi').table('offers').count() resp.body = json.dumps(list(cursor)) resp.append_header('X-Max-Elements', count) app = falcon.API() app.add_route('/api/offers', OfferListResource())
import falcon import json import rethinkdb as r MAX_OFFERS = 100 class OfferListResource: def __init__(self): self._db = r.connect('localhost', 28015) def on_get(self, req, resp): """Returns all offers available""" try: limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1))) except ValueError as e: raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page") if page < 1: raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page") elif limit < 1: raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page") else: cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) count = r.db('voyageavecmoi').table('offers').count().run(self._db) resp.body = json.dumps(list(cursor)) resp.append_header('X-Max-Elements', count) app = falcon.API() app.add_route('/api/offers', OfferListResource())
Fix max elements in header
Fix max elements in header
Python
agpl-3.0
jilljenn/voyageavecmoi,jilljenn/voyageavecmoi,jilljenn/voyageavecmoi
import falcon import json import rethinkdb as r MAX_OFFERS = 100 class OfferListResource: def __init__(self): self._db = r.connect('localhost', 28015) def on_get(self, req, resp): """Returns all offers available""" try: limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1))) except ValueError as e: raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page") if page < 1: raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page") elif limit < 1: raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page") else: cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) - count = r.db('voyageavecmoi').table('offers').count() + count = r.db('voyageavecmoi').table('offers').count().run(self._db) resp.body = json.dumps(list(cursor)) resp.append_header('X-Max-Elements', count) app = falcon.API() app.add_route('/api/offers', OfferListResource())
Fix max elements in header
## Code Before: import falcon import json import rethinkdb as r MAX_OFFERS = 100 class OfferListResource: def __init__(self): self._db = r.connect('localhost', 28015) def on_get(self, req, resp): """Returns all offers available""" try: limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1))) except ValueError as e: raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page") if page < 1: raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page") elif limit < 1: raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page") else: cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) count = r.db('voyageavecmoi').table('offers').count() resp.body = json.dumps(list(cursor)) resp.append_header('X-Max-Elements', count) app = falcon.API() app.add_route('/api/offers', OfferListResource()) ## Instruction: Fix max elements in header ## Code After: import falcon import json import rethinkdb as r MAX_OFFERS = 100 class OfferListResource: def __init__(self): self._db = r.connect('localhost', 28015) def on_get(self, req, resp): """Returns all offers available""" try: limit, page = map(int, (req.params.get('limit', MAX_OFFERS), req.params.get('page', 1))) except ValueError as e: raise falcon.HTTPInvalidParam("Limit or page should be a number", "limit or page") if page < 1: raise falcon.HTTPInvalidParam("Page cannot be negative or null", "page") elif limit < 1: raise falcon.HTTPInvalidParam("Limit cannot be negative or null", "page") else: cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) count = r.db('voyageavecmoi').table('offers').count().run(self._db) resp.body = json.dumps(list(cursor)) resp.append_header('X-Max-Elements', count) app = falcon.API() app.add_route('/api/offers', OfferListResource())
// ... existing code ... cursor = r.db('voyageavecmoi').table('offers').slice(page - 1).limit(limit).run(self._db) count = r.db('voyageavecmoi').table('offers').count().run(self._db) resp.body = json.dumps(list(cursor)) // ... rest of the code ...
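The root cause generalizes beyond RethinkDB: query builders are lazy, so `count()` without `.run(conn)` yields a query object rather than a number. A toy illustration, with no database required (`LazyCount` is invented for the example):

```python
# Toy illustration of the lazy-query pitfall fixed above.
class LazyCount:
    def run(self, conn):
        return 7  # pretend this executes the query on the connection

query = LazyCount()
print(query)            # <__main__.LazyCount object at 0x...> -- the pre-fix header value
print(query.run(None))  # 7 -- the actual count, as sent after the fix
```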
64bc8ff452d03c7bb026be0b2edd9a047a88b386
foyer/forcefields/forcefields.py
foyer/forcefields/forcefields.py
import os import glob from pkg_resources import resource_filename from foyer import Forcefield def get_ff_path(): return [resource_filename('foyer', 'forcefields')] def get_forcefield_paths(forcefield_name=None): for dir_path in get_ff_path(): file_pattern = os.path.join(dir_path, 'xml/*.xml') file_paths = [file_path for file_path in glob.glob(file_pattern)] return file_paths def get_forcefield(name=None): if name is None: raise ValueError('Need a force field name') file_paths = get_forcefield_paths() try: ff_path = next(val for val in file_paths if name in val) except StopIteration: raise ValueError('Could not find force field with name {}' ' in path {}'.format(name, get_ff_path())) return Forcefield(ff_path) load_OPLSAA = get_forcefield(name='oplsaa') load_TRAPPE_UA = get_forcefield(name='trappe-ua')
import os import glob from pkg_resources import resource_filename from foyer import Forcefield def get_ff_path(): return [resource_filename('foyer', 'forcefields')] def get_forcefield_paths(forcefield_name=None): for dir_path in get_ff_path(): file_pattern = os.path.join(dir_path, 'xml/*.xml') file_paths = [file_path for file_path in glob.glob(file_pattern)] return file_paths def get_forcefield(name=None): if name is None: raise ValueError('Need a force field name') file_paths = get_forcefield_paths() try: ff_path = next(val for val in file_paths if name in val) except StopIteration: raise ValueError('Could not find force field with name {}' ' in path {}'.format(name, get_ff_path())) return Forcefield(ff_path) def load_OPLSAA(): return get_forcefield(name='oplsaa') def load_TRAPPE_UA(): return get_forcefield(name='trappe-ua') load_OPLSAA = load_OPLSAA load_TRAPPE_UA = load_TRAPPE_UA
Make discrete functions for each force field
Make discrete functions for each force field
Python
mit
mosdef-hub/foyer,mosdef-hub/foyer,iModels/foyer,iModels/foyer
import os import glob from pkg_resources import resource_filename from foyer import Forcefield def get_ff_path(): return [resource_filename('foyer', 'forcefields')] def get_forcefield_paths(forcefield_name=None): for dir_path in get_ff_path(): file_pattern = os.path.join(dir_path, 'xml/*.xml') file_paths = [file_path for file_path in glob.glob(file_pattern)] return file_paths def get_forcefield(name=None): if name is None: raise ValueError('Need a force field name') file_paths = get_forcefield_paths() try: ff_path = next(val for val in file_paths if name in val) except StopIteration: raise ValueError('Could not find force field with name {}' ' in path {}'.format(name, get_ff_path())) return Forcefield(ff_path) - load_OPLSAA = get_forcefield(name='oplsaa') - load_TRAPPE_UA = get_forcefield(name='trappe-ua') + def load_OPLSAA(): + return get_forcefield(name='oplsaa') + + + def load_TRAPPE_UA(): + return get_forcefield(name='trappe-ua') + + + load_OPLSAA = load_OPLSAA + load_TRAPPE_UA = load_TRAPPE_UA +
Make discrete functions for each force field
## Code Before: import os import glob from pkg_resources import resource_filename from foyer import Forcefield def get_ff_path(): return [resource_filename('foyer', 'forcefields')] def get_forcefield_paths(forcefield_name=None): for dir_path in get_ff_path(): file_pattern = os.path.join(dir_path, 'xml/*.xml') file_paths = [file_path for file_path in glob.glob(file_pattern)] return file_paths def get_forcefield(name=None): if name is None: raise ValueError('Need a force field name') file_paths = get_forcefield_paths() try: ff_path = next(val for val in file_paths if name in val) except StopIteration: raise ValueError('Could not find force field with name {}' ' in path {}'.format(name, get_ff_path())) return Forcefield(ff_path) load_OPLSAA = get_forcefield(name='oplsaa') load_TRAPPE_UA = get_forcefield(name='trappe-ua') ## Instruction: Make discrete functions for each force field ## Code After: import os import glob from pkg_resources import resource_filename from foyer import Forcefield def get_ff_path(): return [resource_filename('foyer', 'forcefields')] def get_forcefield_paths(forcefield_name=None): for dir_path in get_ff_path(): file_pattern = os.path.join(dir_path, 'xml/*.xml') file_paths = [file_path for file_path in glob.glob(file_pattern)] return file_paths def get_forcefield(name=None): if name is None: raise ValueError('Need a force field name') file_paths = get_forcefield_paths() try: ff_path = next(val for val in file_paths if name in val) except StopIteration: raise ValueError('Could not find force field with name {}' ' in path {}'.format(name, get_ff_path())) return Forcefield(ff_path) def load_OPLSAA(): return get_forcefield(name='oplsaa') def load_TRAPPE_UA(): return get_forcefield(name='trappe-ua') load_OPLSAA = load_OPLSAA load_TRAPPE_UA = load_TRAPPE_UA
# ... existing code ... def load_OPLSAA(): return get_forcefield(name='oplsaa') def load_TRAPPE_UA(): return get_forcefield(name='trappe-ua') load_OPLSAA = load_OPLSAA load_TRAPPE_UA = load_TRAPPE_UA # ... rest of the code ...
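The change matters because the original module-level assignments ran `get_forcefield` at import time, failing on a missing XML before any caller asked for a force field; wrapping the calls defers the work. In miniature (the demo function is hypothetical):

```python
# Eager vs. lazy module-level work, in miniature.
def load_forcefield_demo(name):
    print('loading', name)  # stands in for the expensive Forcefield(...) call
    return object()

# Eager: runs the moment this module is imported (the old behaviour).
EAGER = load_forcefield_demo('oplsaa')

# Lazy: nothing happens until a caller invokes load_lazy() (the new behaviour).
def load_lazy():
    return load_forcefield_demo('oplsaa')
```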
80e4caad24bceabd8e15133a96a6aaddd9a97c07
code/type_null_true_false.py
code/type_null_true_false.py
def if_value(values): print('"if value":') for k, v in values: print("%s - %s" % (k, 'true' if v else 'false')) print() def nil_value(values): print('"if value is None":') for k, v in values: print("%s - %s" % (k, 'true' if v is None else 'false')) print() def empty_value(values): print('"if len(value)":') for k, v in values: try: print("%s - %s" % (k, 'true' if len(v) else 'false')) except TypeError as e: print("%s - %s" % (k, e)) values = [ ("'string'", 'string'), ("''", ''), ('[1, 2, 3]', [1, 2, 3]), ('[]', []), ('5', 5), ('0', 0), (True, True), (False, False), (None, None), ] if_value(values) nil_value(values) empty_value(values)
def check(label, fn, values): print(label) for value in values: try: result = 'true' if fn(value) else 'false' except TypeError as e: result = 'error: %s' % e print(" %-9r - %s" % (value, result)) print() values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None] check('if value:', lambda v: v, values) check('if value is None:', lambda v: v is None, values) check('if len(value):', lambda v: len(v), values)
Refactor Null/True/False to look more pythonic
Refactor Null/True/False to look more pythonic
Python
mit
evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,Evmorov/ruby-coffeescript,evmorov/lang-compare,evmorov/lang-compare
- def if_value(values): - print('"if value":') + def check(label, fn, values): + print(label) - for k, v in values: + for value in values: - print("%s - %s" % (k, 'true' if v else 'false')) + try: + result = 'true' if fn(value) else 'false' + except TypeError as e: + result = 'error: %s' % e + print(" %-9r - %s" % (value, result)) - print() + print() + values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None] - def nil_value(values): - print('"if value is None":') - for k, v in values: - print("%s - %s" % (k, 'true' if v is None else 'false')) - print() + check('if value:', lambda v: v, values) + check('if value is None:', lambda v: v is None, values) + check('if len(value):', lambda v: len(v), values) - def empty_value(values): - print('"if len(value)":') - for k, v in values: - try: - print("%s - %s" % (k, 'true' if len(v) else 'false')) - except TypeError as e: - print("%s - %s" % (k, e)) - values = [ - ("'string'", 'string'), - ("''", ''), - ('[1, 2, 3]', [1, 2, 3]), - ('[]', []), - ('5', 5), - ('0', 0), - (True, True), - (False, False), - (None, None), - ] - - if_value(values) - nil_value(values) - empty_value(values) -
Refactor Null/True/False to look more pythonic
## Code Before: def if_value(values): print('"if value":') for k, v in values: print("%s - %s" % (k, 'true' if v else 'false')) print() def nil_value(values): print('"if value is None":') for k, v in values: print("%s - %s" % (k, 'true' if v is None else 'false')) print() def empty_value(values): print('"if len(value)":') for k, v in values: try: print("%s - %s" % (k, 'true' if len(v) else 'false')) except TypeError as e: print("%s - %s" % (k, e)) values = [ ("'string'", 'string'), ("''", ''), ('[1, 2, 3]', [1, 2, 3]), ('[]', []), ('5', 5), ('0', 0), (True, True), (False, False), (None, None), ] if_value(values) nil_value(values) empty_value(values) ## Instruction: Refactor Null/True/False to look more pythonic ## Code After: def check(label, fn, values): print(label) for value in values: try: result = 'true' if fn(value) else 'false' except TypeError as e: result = 'error: %s' % e print(" %-9r - %s" % (value, result)) print() values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None] check('if value:', lambda v: v, values) check('if value is None:', lambda v: v is None, values) check('if len(value):', lambda v: len(v), values)
# ... existing code ... def check(label, fn, values): print(label) for value in values: try: result = 'true' if fn(value) else 'false' except TypeError as e: result = 'error: %s' % e print(" %-9r - %s" % (value, result)) print() values = ['string', '', [1, 2, 3], [], 5, 0, True, False, None] check('if value:', lambda v: v, values) check('if value is None:', lambda v: v is None, values) check('if len(value):', lambda v: len(v), values) # ... rest of the code ...
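A quick spot-check of the three predicates the refactored script reports, including the `TypeError` it now prints inline instead of letting it escape:

```python
# Spot-check of the three predicates exercised by the script above.
for value in (0, [], ''):
    print(repr(value), bool(value), value is None)
# 0 False False
# [] False False
# '' False False

try:
    len(5)
except TypeError as e:
    print(e)  # object of type 'int' has no len()
```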
7e407d1185235f4a89bddcaffcde240a33b522f4
expand_region_handler.py
expand_region_handler.py
try: import javascript import html except: from . import javascript from . import html def expand(string, start, end, extension=None): if(extension in ["html", "htm", "xml"]): return html.expand(string, start, end) return javascript.expand(string, start, end)
import re try: import javascript import html except: from . import javascript from . import html def expand(string, start, end, extension=None): if(re.compile("html|htm|xml").search(extension)): return html.expand(string, start, end) return javascript.expand(string, start, end)
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.
Python
mit
aronwoost/sublime-expand-region,johyphenel/sublime-expand-region,johyphenel/sublime-expand-region
+ import re + try: import javascript import html except: from . import javascript from . import html def expand(string, start, end, extension=None): - if(extension in ["html", "htm", "xml"]): + if(re.compile("html|htm|xml").search(extension)): return html.expand(string, start, end) return javascript.expand(string, start, end)
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.
## Code Before:
try:
    import javascript
    import html
except:
    from . import javascript
    from . import html


def expand(string, start, end, extension=None):
    if(extension in ["html", "htm", "xml"]):
        return html.expand(string, start, end)
    return javascript.expand(string, start, end)

## Instruction:
Use html strategy for any file that has xml/html in file extension. This will match shtml, xhtml and so on.

## Code After:
import re

try:
    import javascript
    import html
except:
    from . import javascript
    from . import html


def expand(string, start, end, extension=None):
    if(re.compile("html|htm|xml").search(extension)):
        return html.expand(string, start, end)
    return javascript.expand(string, start, end)
# ... existing code ... import re try: # ... modified code ... if(re.compile("html|htm|xml").search(extension)): return html.expand(string, start, end) # ... rest of the code ...
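The behavioural difference is that `re.search` also matches substrings, where the old membership test required the whole extension to be equal; that is exactly what lets `shtml` and `xhtml` through:

```python
# Membership test vs. substring search over file extensions.
import re

pattern = re.compile("html|htm|xml")
for ext in ("html", "shtml", "xhtml", "js"):
    print(ext, ext in ["html", "htm", "xml"], bool(pattern.search(ext)))
# html True True
# shtml False True
# xhtml False True
# js False False
```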
24b78a4d510606563106da24d568d5fb79ddca2b
IPython/__main__.py
IPython/__main__.py
#----------------------------------------------------------------------------- # Copyright (c) 2012, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- from IPython.terminal.ipapp import launch_new_instance launch_new_instance()
#----------------------------------------------------------------------------- # Copyright (c) 2012, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- from IPython import start_ipython start_ipython()
Use new entry point for python -m IPython
Use new entry point for python -m IPython
Python
bsd-3-clause
ipython/ipython,ipython/ipython
#----------------------------------------------------------------------------- # Copyright (c) 2012, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- - from IPython.terminal.ipapp import launch_new_instance + from IPython import start_ipython - launch_new_instance() + start_ipython()
Use new entry point for python -m IPython
## Code Before: #----------------------------------------------------------------------------- # Copyright (c) 2012, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- from IPython.terminal.ipapp import launch_new_instance launch_new_instance() ## Instruction: Use new entry point for python -m IPython ## Code After: #----------------------------------------------------------------------------- # Copyright (c) 2012, IPython Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- from IPython import start_ipython start_ipython()
... from IPython import start_ipython start_ipython() ...
df18229b38a01d87076f3b13aee5bfd1f0f989c2
tunobase/blog/models.py
tunobase/blog/models.py
''' Blog App This module determines how to display the Blog app in Django's admin and lists other model functions. ''' from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from tunobase.core import models as core_models class Blog(core_models.ContentModel): ''' Blogs the Site has ''' class BlogEntry(core_models.ContentModel): ''' Entries per Blog ''' blog = models.ForeignKey(Blog, related_name='entries') author_users = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name='blog_entries_authored', null=True, blank=True ) authors_alternate = models.CharField( max_length=512, blank=True, null=True ) class Meta: verbose_name_plural = 'Blog entries' def get_absolute_url(self): return reverse('blog_entry_detail', args=(self.slug,)) @property def authors(self): ''' Return a list of authors selected as users on the system and a list of alternate authors as not users on the system if either exist ''' authors_dict = {} auth_users = self.author_users.all() if auth_users: authors_dict.update({ 'users': auth_users }) if self.authors_alternate: authors_dict.update({ 'alternate': self.authors_alternate.split(',') }) return authors_dict
''' Blog App This module determines how to display the Blog app in Django's admin and lists other model functions. ''' from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from tunobase.core import models as core_models class Blog(core_models.ContentModel): ''' Blogs the Site has ''' class Meta: verbose_name = 'Blog Category' verbose_name_plural = 'Blog Categories' class BlogEntry(core_models.ContentModel): ''' Entries per Blog ''' blog = models.ForeignKey(Blog, related_name='entries') author_users = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name='blog_entries_authored', null=True, blank=True ) authors_alternate = models.CharField( max_length=512, blank=True, null=True ) class Meta: verbose_name_plural = 'Blog entries' def get_absolute_url(self): return reverse('blog_entry_detail', args=(self.slug,)) @property def authors(self): ''' Return a list of authors selected as users on the system and a list of alternate authors as not users on the system if either exist ''' authors_dict = {} auth_users = self.author_users.all() if auth_users: authors_dict.update({ 'users': auth_users }) if self.authors_alternate: authors_dict.update({ 'alternate': self.authors_alternate.split(',') }) return authors_dict
Update blog model with a more descriptive name
Update blog model with a more descriptive name
Python
bsd-3-clause
unomena/tunobase,unomena/tunobase
''' Blog App This module determines how to display the Blog app in Django's admin and lists other model functions. ''' from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from tunobase.core import models as core_models class Blog(core_models.ContentModel): ''' Blogs the Site has ''' + + class Meta: + verbose_name = 'Blog Category' + verbose_name_plural = 'Blog Categories' class BlogEntry(core_models.ContentModel): ''' Entries per Blog ''' blog = models.ForeignKey(Blog, related_name='entries') author_users = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name='blog_entries_authored', null=True, blank=True ) authors_alternate = models.CharField( max_length=512, blank=True, null=True ) class Meta: verbose_name_plural = 'Blog entries' def get_absolute_url(self): return reverse('blog_entry_detail', args=(self.slug,)) @property def authors(self): ''' Return a list of authors selected as users on the system and a list of alternate authors as not users on the system if either exist ''' authors_dict = {} auth_users = self.author_users.all() if auth_users: authors_dict.update({ 'users': auth_users }) if self.authors_alternate: authors_dict.update({ 'alternate': self.authors_alternate.split(',') }) return authors_dict
Update blog model with a more descriptive name
## Code Before: ''' Blog App This module determines how to display the Blog app in Django's admin and lists other model functions. ''' from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from tunobase.core import models as core_models class Blog(core_models.ContentModel): ''' Blogs the Site has ''' class BlogEntry(core_models.ContentModel): ''' Entries per Blog ''' blog = models.ForeignKey(Blog, related_name='entries') author_users = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name='blog_entries_authored', null=True, blank=True ) authors_alternate = models.CharField( max_length=512, blank=True, null=True ) class Meta: verbose_name_plural = 'Blog entries' def get_absolute_url(self): return reverse('blog_entry_detail', args=(self.slug,)) @property def authors(self): ''' Return a list of authors selected as users on the system and a list of alternate authors as not users on the system if either exist ''' authors_dict = {} auth_users = self.author_users.all() if auth_users: authors_dict.update({ 'users': auth_users }) if self.authors_alternate: authors_dict.update({ 'alternate': self.authors_alternate.split(',') }) return authors_dict ## Instruction: Update blog model with a more descriptive name ## Code After: ''' Blog App This module determines how to display the Blog app in Django's admin and lists other model functions. ''' from django.conf import settings from django.core.urlresolvers import reverse from django.db import models from tunobase.core import models as core_models class Blog(core_models.ContentModel): ''' Blogs the Site has ''' class Meta: verbose_name = 'Blog Category' verbose_name_plural = 'Blog Categories' class BlogEntry(core_models.ContentModel): ''' Entries per Blog ''' blog = models.ForeignKey(Blog, related_name='entries') author_users = models.ManyToManyField( settings.AUTH_USER_MODEL, related_name='blog_entries_authored', null=True, blank=True ) authors_alternate = models.CharField( max_length=512, blank=True, null=True ) class Meta: verbose_name_plural = 'Blog entries' def get_absolute_url(self): return reverse('blog_entry_detail', args=(self.slug,)) @property def authors(self): ''' Return a list of authors selected as users on the system and a list of alternate authors as not users on the system if either exist ''' authors_dict = {} auth_users = self.author_users.all() if auth_users: authors_dict.update({ 'users': auth_users }) if self.authors_alternate: authors_dict.update({ 'alternate': self.authors_alternate.split(',') }) return authors_dict
// ... existing code ... ''' class Meta: verbose_name = 'Blog Category' verbose_name_plural = 'Blog Categories' // ... rest of the code ...
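The change in this record turns on Django's Meta display options, which is worth one self-contained illustration. A minimal sketch, assuming it lives in an installed app of a configured Django project; the Category model name is hypothetical and not taken from the record:

from django.db import models

class Category(models.Model):
    name = models.CharField(max_length=100)

    class Meta:
        # Django's defaults derive from the class name: verbose_name
        # would be "category" and the plural a naive "categorys", so
        # both are overridden here for the admin's benefit.
        verbose_name = 'Blog Category'
        verbose_name_plural = 'Blog Categories'

The same naive pluralisation is why the record's BlogEntry model already carried verbose_name_plural = 'Blog entries'.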
5b8edee2b6fa13fc1b05e15961d8b7920e6f9739
csunplugged/general/urls.py
csunplugged/general/urls.py
"""URL routing for the general application.""" from django.conf.urls import url from . import views urlpatterns = [ url(r"^$", views.GeneralIndexView.as_view(), name="home"), url(r"^about/$", views.GeneralAboutView.as_view(), name="about"), url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"), url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"), url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"), url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"), ]
"""URL routing for the general application.""" from django.conf.urls import url from . import views urlpatterns = [ url( r"^$", views.GeneralIndexView.as_view(), name="home" ), url( r"^about/$", views.GeneralAboutView.as_view(), name="about" ), url( r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking" ), url( r"^contact/$", views.GeneralContactView.as_view(), name="contact" ), url( r"^people/$", views.GeneralPeopleView.as_view(), name="people" ), url( r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles" ), ]
Update URLs file to match layout of other URLs files
Update URLs file to match layout of other URLs files
Python
mit
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
"""URL routing for the general application.""" from django.conf.urls import url from . import views urlpatterns = [ - url(r"^$", views.GeneralIndexView.as_view(), name="home"), - url(r"^about/$", views.GeneralAboutView.as_view(), name="about"), - url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"), - url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"), - url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"), - url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"), + url( + r"^$", + views.GeneralIndexView.as_view(), + name="home" + ), + url( + r"^about/$", + views.GeneralAboutView.as_view(), + name="about" + ), + url( + r"^computational-thinking/$", + views.ComputationalThinkingView.as_view(), + name="computational_thinking" + ), + url( + r"^contact/$", + views.GeneralContactView.as_view(), + name="contact" + ), + url( + r"^people/$", + views.GeneralPeopleView.as_view(), + name="people" + ), + url( + r"^principles/$", + views.GeneralPrinciplesView.as_view(), + name="principles" + ), ]
Update URLs file to match layout of other URLs files
## Code Before: """URL routing for the general application.""" from django.conf.urls import url from . import views urlpatterns = [ url(r"^$", views.GeneralIndexView.as_view(), name="home"), url(r"^about/$", views.GeneralAboutView.as_view(), name="about"), url(r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking"), url(r"^contact/$", views.GeneralContactView.as_view(), name="contact"), url(r"^people/$", views.GeneralPeopleView.as_view(), name="people"), url(r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles"), ] ## Instruction: Update URLs file to match layout of other URLs files ## Code After: """URL routing for the general application.""" from django.conf.urls import url from . import views urlpatterns = [ url( r"^$", views.GeneralIndexView.as_view(), name="home" ), url( r"^about/$", views.GeneralAboutView.as_view(), name="about" ), url( r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking" ), url( r"^contact/$", views.GeneralContactView.as_view(), name="contact" ), url( r"^people/$", views.GeneralPeopleView.as_view(), name="people" ), url( r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles" ), ]
// ... existing code ... urlpatterns = [ url( r"^$", views.GeneralIndexView.as_view(), name="home" ), url( r"^about/$", views.GeneralAboutView.as_view(), name="about" ), url( r"^computational-thinking/$", views.ComputationalThinkingView.as_view(), name="computational_thinking" ), url( r"^contact/$", views.GeneralContactView.as_view(), name="contact" ), url( r"^people/$", views.GeneralPeopleView.as_view(), name="people" ), url( r"^principles/$", views.GeneralPrinciplesView.as_view(), name="principles" ), ] // ... rest of the code ...
6ab01b1e26184bf296cf58939db5299f07cd68f5
malcolm/modules/pmac/parts/__init__.py
malcolm/modules/pmac/parts/__init__.py
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \ APartName, ARbv, AGroup from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup from .cspart import CSPart, AMri from .pmacchildpart import PmacChildPart, AMri, APartName from .pmacstatuspart import PmacStatusPart from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup from .motorpremovepart import MotorPreMovePart, APartName, AMri # Expose a nice namespace from malcolm.core import submodule_all __all__ = submodule_all(globals())
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \ APartName, ARbv, AGroup from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup from .cspart import CSPart, AMri from .pmacchildpart import PmacChildPart, AMri, APartName from .pmacstatuspart import PmacStatusPart from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup from .motorpremovepart import MotorPreMovePart, APartName, AMri from .beamselectorpart import BeamSelectorPart # Expose a nice namespace from malcolm.core import submodule_all __all__ = submodule_all(globals())
Add beamselectorpart to the PMAC module
Add beamselectorpart to the PMAC module
Python
apache-2.0
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \ APartName, ARbv, AGroup from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup from .cspart import CSPart, AMri from .pmacchildpart import PmacChildPart, AMri, APartName from .pmacstatuspart import PmacStatusPart from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup from .motorpremovepart import MotorPreMovePart, APartName, AMri + from .beamselectorpart import BeamSelectorPart # Expose a nice namespace from malcolm.core import submodule_all __all__ = submodule_all(globals())
Add beamselectorpart to the PMAC module
## Code Before: from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \ APartName, ARbv, AGroup from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup from .cspart import CSPart, AMri from .pmacchildpart import PmacChildPart, AMri, APartName from .pmacstatuspart import PmacStatusPart from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup from .motorpremovepart import MotorPreMovePart, APartName, AMri # Expose a nice namespace from malcolm.core import submodule_all __all__ = submodule_all(globals()) ## Instruction: Add beamselectorpart to the PMAC module ## Code After: from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \ APartName, ARbv, AGroup from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup from .cspart import CSPart, AMri from .pmacchildpart import PmacChildPart, AMri, APartName from .pmacstatuspart import PmacStatusPart from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup from .motorpremovepart import MotorPreMovePart, APartName, AMri from .beamselectorpart import BeamSelectorPart # Expose a nice namespace from malcolm.core import submodule_all __all__ = submodule_all(globals())
# ... existing code ... from .motorpremovepart import MotorPreMovePart, APartName, AMri from .beamselectorpart import BeamSelectorPart # ... rest of the code ...
9dc253b79d885ca205b557f88fca6fa35bd8fe21
tests/test_selector.py
tests/test_selector.py
from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = list(sel.ready()) for ev in r: assert ev.monitored in monitors assert ev.fp in handles assert r assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = set(k.fp for k in sel.ready()) assert r == set(handles) assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
Make Selector.scope test more rigorous
Make Selector.scope test more rigorous
Python
mit
eugene-eeo/scell
from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: - r = list(sel.ready()) + r = set(k.fp for k in sel.ready()) + assert r == set(handles) - for ev in r: - assert ev.monitored in monitors - assert ev.fp in handles - assert r assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
Make Selector.scope test more rigorous
## Code Before: from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = list(sel.ready()) for ev in r: assert ev.monitored in monitors assert ev.fp in handles assert r assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel ## Instruction: Make Selector.scope test more rigorous ## Code After: from contextlib import contextmanager from scell import Selector from pytest import raises, fixture def test_select(selector): res = list(selector.select()) assert res for event in res: assert event.ready def test_select_empty(): sel = Selector() assert list(sel.select()) == [] def test_unregister(selector): for fp in list(selector): selector.unregister(fp) assert not selector def test_info(selector): for fp in selector: assert selector.info(fp).wants_read assert selector.info(0) is None def test_callbacks(selector): res = selector.select() exp = len(selector) assert sum(m.callback() for m in res) == exp def test_ready(selector): ready = list(selector.ready()) assert ready for event in ready: assert event.ready class TestScoped(object): @fixture def sel(self): return Selector() def test_peaceful(self, sel, handles): with sel.scoped(handles) as monitors: r = set(k.fp for k in sel.ready()) assert r == set(handles) assert not sel def test_exception(self, sel, handles): with raises(NameError): with sel.scoped(handles) as _: raise NameError assert not sel
... with sel.scoped(handles) as monitors: r = set(k.fp for k in sel.ready()) assert r == set(handles) assert not sel ...
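What makes the rewritten test "more rigorous" is the move from "some events arrived" to exact membership equality, with sets keeping the assertion independent of the order in which readiness is reported. A tiny illustration with made-up values rather than real file descriptors:

produced = [3, 1, 2]   # e.g. fds a selector reported ready, any order
expected = [1, 2, 3]   # the handles that were registered
assert set(produced) == set(expected)   # order no longer matters

# Trade-off worth knowing: sets collapse duplicates, so this asserts
# membership equality rather than multiplicity.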
8d7f3320a9d3fd3b7365cad7631835a0a46f374e
planner/signals.py
planner/signals.py
from django.db.models.signals import m2m_changed from django.dispatch import receiver from django.core.exceptions import ValidationError from django.utils.translation import ugettext_lazy as _ from .models import Step @receiver(m2m_changed, sender=Step.passengers.through) def check_passengers(sender, **kwargs): step = kwargs['instance'] if step.passengers.count() >= 8: raise ValidationError(_("You exceeded passenger maximum number"))
from django.db.models.signals import m2m_changed from django.dispatch import receiver from .models import Step @receiver(m2m_changed, sender=Step.passengers.through) def check_passengers(sender, **kwargs): step = kwargs['instance'] if kwargs['action'] == 'post_add': if step.passengers.count() >= step.trip.max_num_passengers: step.trip.is_joinable = False elif kwargs['action'] == 'post_remove': step.trip.is_joinable = True
Make is_joinable automatic based of passenger number
Make is_joinable automatic based of passenger number
Python
mit
livingsilver94/getaride,livingsilver94/getaride,livingsilver94/getaride
from django.db.models.signals import m2m_changed from django.dispatch import receiver - from django.core.exceptions import ValidationError - from django.utils.translation import ugettext_lazy as _ from .models import Step @receiver(m2m_changed, sender=Step.passengers.through) def check_passengers(sender, **kwargs): step = kwargs['instance'] - if step.passengers.count() >= 8: - raise ValidationError(_("You exceeded passenger maximum number")) + if kwargs['action'] == 'post_add': + if step.passengers.count() >= step.trip.max_num_passengers: + step.trip.is_joinable = False + elif kwargs['action'] == 'post_remove': + step.trip.is_joinable = True
Make is_joinable automatic based of passenger number
## Code Before: from django.db.models.signals import m2m_changed from django.dispatch import receiver from django.core.exceptions import ValidationError from django.utils.translation import ugettext_lazy as _ from .models import Step @receiver(m2m_changed, sender=Step.passengers.through) def check_passengers(sender, **kwargs): step = kwargs['instance'] if step.passengers.count() >= 8: raise ValidationError(_("You exceeded passenger maximum number")) ## Instruction: Make is_joinable automatic based of passenger number ## Code After: from django.db.models.signals import m2m_changed from django.dispatch import receiver from .models import Step @receiver(m2m_changed, sender=Step.passengers.through) def check_passengers(sender, **kwargs): step = kwargs['instance'] if kwargs['action'] == 'post_add': if step.passengers.count() >= step.trip.max_num_passengers: step.trip.is_joinable = False elif kwargs['action'] == 'post_remove': step.trip.is_joinable = True
... from django.dispatch import receiver from .models import Step ... step = kwargs['instance'] if kwargs['action'] == 'post_add': if step.passengers.count() >= step.trip.max_num_passengers: step.trip.is_joinable = False elif kwargs['action'] == 'post_remove': step.trip.is_joinable = True ...
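This record leans on the keyword arguments Django passes to m2m_changed receivers, so the dispatch protocol deserves a self-contained sketch. It assumes a configured Django project; the Pizza and Topping models are hypothetical stand-ins, not the record's own:

from django.db import models
from django.db.models.signals import m2m_changed
from django.dispatch import receiver

class Topping(models.Model):
    name = models.CharField(max_length=50)

class Pizza(models.Model):
    toppings = models.ManyToManyField(Topping)

@receiver(m2m_changed, sender=Pizza.toppings.through)
def toppings_changed(sender, instance, action, pk_set, **kwargs):
    # action cycles through 'pre_add'/'post_add',
    # 'pre_remove'/'post_remove' and 'pre_clear'/'post_clear';
    # pk_set holds the primary keys being added or removed
    # (it is None for the clear actions).
    if action == 'post_add':
        print('added to', instance, ':', pk_set)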
f7e2bcf941e2a15a3bc28ebf3f15244df6f0d758
posts/versatileimagefield.py
posts/versatileimagefield.py
from django.conf import settings from versatileimagefield.datastructures.filteredimage import FilteredImage from versatileimagefield.registry import versatileimagefield_registry from PIL import Image, ImageDraw, ImageFont from io import BytesIO class Watermark(FilteredImage): def process_image(self, image, image_format, save_kwargs={}): """ Returns a BytesIO instance of `image` with inverted colors """ if image.mode != 'RGBA': image = image.convert('RGBA') txt = Image.new('RGBA', image.size, (255,255,255,0)) fontsize = int(image.size[1] * 0.1) # get a font fnt = ImageFont.truetype( '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf', fontsize, ) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text( (10, image.size[1] - 10 - fontsize), settings.WATERMARK_TEXT, font=fnt, fill=(255,255,255,30) ) out = Image.alpha_composite(image, txt) out = out.convert('RGB') imagefile = BytesIO() out.save( imagefile, **save_kwargs ) return imagefile versatileimagefield_registry.register_filter('watermark', Watermark)
import os.path from django.conf import settings from versatileimagefield.datastructures.filteredimage import FilteredImage from versatileimagefield.registry import versatileimagefield_registry from PIL import Image, ImageDraw, ImageFont from io import BytesIO class Watermark(FilteredImage): def process_image(self, image, image_format, save_kwargs={}): """ Returns a BytesIO instance of `image` with inverted colors """ if image.mode != 'RGBA': image = image.convert('RGBA') txt = Image.new('RGBA', image.size, (255,255,255,0)) height = image.size[1] fontsize = int(image.size[1] * 0.1) # get a font fnt = ImageFont.truetype( os.path.join( os.path.dirname(os.path.dirname(__file__)), 'font', 'conthrax-sb.ttf' ), fontsize, ) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text( (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2), settings.WATERMARK_TEXT, font=fnt, fill=(255,255,255,30) ) out = Image.alpha_composite(image, txt) out = out.convert('RGB') imagefile = BytesIO() out.save( imagefile, **save_kwargs ) return imagefile versatileimagefield_registry.register_filter('watermark', Watermark)
Use custom font for watermark
Use custom font for watermark Signed-off-by: Michal Čihař <[email protected]>
Python
agpl-3.0
nijel/photoblog,nijel/photoblog
+ import os.path from django.conf import settings from versatileimagefield.datastructures.filteredimage import FilteredImage from versatileimagefield.registry import versatileimagefield_registry from PIL import Image, ImageDraw, ImageFont from io import BytesIO class Watermark(FilteredImage): def process_image(self, image, image_format, save_kwargs={}): """ Returns a BytesIO instance of `image` with inverted colors """ if image.mode != 'RGBA': image = image.convert('RGBA') txt = Image.new('RGBA', image.size, (255,255,255,0)) + height = image.size[1] fontsize = int(image.size[1] * 0.1) # get a font fnt = ImageFont.truetype( - '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf', + os.path.join( + os.path.dirname(os.path.dirname(__file__)), + 'font', 'conthrax-sb.ttf' + ), fontsize, ) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text( - (10, image.size[1] - 10 - fontsize), + (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2), settings.WATERMARK_TEXT, font=fnt, fill=(255,255,255,30) ) out = Image.alpha_composite(image, txt) out = out.convert('RGB') imagefile = BytesIO() out.save( imagefile, **save_kwargs ) return imagefile versatileimagefield_registry.register_filter('watermark', Watermark)
Use custom font for watermark
## Code Before: from django.conf import settings from versatileimagefield.datastructures.filteredimage import FilteredImage from versatileimagefield.registry import versatileimagefield_registry from PIL import Image, ImageDraw, ImageFont from io import BytesIO class Watermark(FilteredImage): def process_image(self, image, image_format, save_kwargs={}): """ Returns a BytesIO instance of `image` with inverted colors """ if image.mode != 'RGBA': image = image.convert('RGBA') txt = Image.new('RGBA', image.size, (255,255,255,0)) fontsize = int(image.size[1] * 0.1) # get a font fnt = ImageFont.truetype( '/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf', fontsize, ) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text( (10, image.size[1] - 10 - fontsize), settings.WATERMARK_TEXT, font=fnt, fill=(255,255,255,30) ) out = Image.alpha_composite(image, txt) out = out.convert('RGB') imagefile = BytesIO() out.save( imagefile, **save_kwargs ) return imagefile versatileimagefield_registry.register_filter('watermark', Watermark) ## Instruction: Use custom font for watermark ## Code After: import os.path from django.conf import settings from versatileimagefield.datastructures.filteredimage import FilteredImage from versatileimagefield.registry import versatileimagefield_registry from PIL import Image, ImageDraw, ImageFont from io import BytesIO class Watermark(FilteredImage): def process_image(self, image, image_format, save_kwargs={}): """ Returns a BytesIO instance of `image` with inverted colors """ if image.mode != 'RGBA': image = image.convert('RGBA') txt = Image.new('RGBA', image.size, (255,255,255,0)) height = image.size[1] fontsize = int(image.size[1] * 0.1) # get a font fnt = ImageFont.truetype( os.path.join( os.path.dirname(os.path.dirname(__file__)), 'font', 'conthrax-sb.ttf' ), fontsize, ) # get a drawing context d = ImageDraw.Draw(txt) # draw text, half opacity d.text( (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2), settings.WATERMARK_TEXT, font=fnt, fill=(255,255,255,30) ) out = Image.alpha_composite(image, txt) out = out.convert('RGB') imagefile = BytesIO() out.save( imagefile, **save_kwargs ) return imagefile versatileimagefield_registry.register_filter('watermark', Watermark)
# ... existing code ... import os.path from django.conf import settings # ... modified code ... height = image.size[1] fontsize = int(image.size[1] * 0.1) ... fnt = ImageFont.truetype( os.path.join( os.path.dirname(os.path.dirname(__file__)), 'font', 'conthrax-sb.ttf' ), fontsize, ... d.text( (10 + fontsize * .2, height - 10 - fontsize - fontsize * .2), settings.WATERMARK_TEXT, # ... rest of the code ...
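Two parts of this record generalise well: resolving a bundled asset relative to __file__ instead of hard-coding a system font path, and drawing semi-transparent text onto a separate RGBA layer before compositing. A minimal runnable Pillow sketch; it uses the built-in default font to avoid depending on any font file, and the sizes, text and output filename are all placeholders:

from PIL import Image, ImageDraw, ImageFont

base = Image.new('RGBA', (400, 200), (30, 30, 30, 255))  # stands in for a photo
overlay = Image.new('RGBA', base.size, (255, 255, 255, 0))
draw = ImageDraw.Draw(overlay)
font = ImageFont.load_default()
# The low alpha in the fill (30 of 255) is what makes it a watermark.
draw.text((10, 170), 'watermark', font=font, fill=(255, 255, 255, 30))
# alpha_composite requires two RGBA images of identical size.
out = Image.alpha_composite(base, overlay).convert('RGB')
out.save('watermarked.jpg')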
d3f5e0e2d6104963237a0626d608cc1b0949b762
zounds/learn/functional.py
zounds/learn/functional.py
import numpy as np def hyperplanes(means, stds, n_planes): if len(means) != len(stds): raise ValueError('means and stds must have the same length') n_features = len(means) a = np.random.normal(means, stds, (n_planes, n_features)) b = np.random.normal(means, stds, (n_planes, n_features)) plane_vectors = a - b return plane_vectors def simhash(plane_vectors, data): output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8) flattened = data.reshape((len(data), -1)) x = np.dot(plane_vectors, flattened.T).T output[np.where(x > 0)] = 1 return output def example_wise_unit_norm(x): original_shape = x.shape x = x.reshape((len(x), -1)) norms = np.linalg.norm(x, axis=-1, keepdims=True) normed = np.divide(x, norms, where=norms != 0) return normed.reshape(original_shape)
import numpy as np def hyperplanes(means, stds, n_planes): if len(means) != len(stds): raise ValueError('means and stds must have the same length') n_features = len(means) a = np.random.normal(means, stds, (n_planes, n_features)) b = np.random.normal(means, stds, (n_planes, n_features)) plane_vectors = a - b return plane_vectors def simhash(plane_vectors, data): output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8) flattened = data.reshape((len(data), -1)) x = np.dot(plane_vectors, flattened.T).T output[np.where(x > 0)] = 1 return output def example_wise_unit_norm(x, return_norms=False): original_shape = x.shape # flatten all dimensions of x, treating the first axis as examples and all # other axes as features x = x.reshape((len(x), -1)) norms = np.linalg.norm(x, axis=-1, keepdims=True) normed = np.divide(x, norms, where=norms != 0) normed = normed.reshape(original_shape) if return_norms: return normed, norms else: return normed
Add an option to also return intermediate example norms
Add an option to also return intermediate example norms
Python
mit
JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds,JohnVinyard/zounds
import numpy as np def hyperplanes(means, stds, n_planes): if len(means) != len(stds): raise ValueError('means and stds must have the same length') n_features = len(means) a = np.random.normal(means, stds, (n_planes, n_features)) b = np.random.normal(means, stds, (n_planes, n_features)) plane_vectors = a - b return plane_vectors def simhash(plane_vectors, data): output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8) flattened = data.reshape((len(data), -1)) x = np.dot(plane_vectors, flattened.T).T output[np.where(x > 0)] = 1 return output - def example_wise_unit_norm(x): + def example_wise_unit_norm(x, return_norms=False): original_shape = x.shape + + # flatten all dimensions of x, treating the first axis as examples and all + # other axes as features x = x.reshape((len(x), -1)) norms = np.linalg.norm(x, axis=-1, keepdims=True) normed = np.divide(x, norms, where=norms != 0) - return normed.reshape(original_shape) + normed = normed.reshape(original_shape) + if return_norms: + return normed, norms + else: + return normed +
Add an option to also return intermediate example norms
## Code Before: import numpy as np def hyperplanes(means, stds, n_planes): if len(means) != len(stds): raise ValueError('means and stds must have the same length') n_features = len(means) a = np.random.normal(means, stds, (n_planes, n_features)) b = np.random.normal(means, stds, (n_planes, n_features)) plane_vectors = a - b return plane_vectors def simhash(plane_vectors, data): output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8) flattened = data.reshape((len(data), -1)) x = np.dot(plane_vectors, flattened.T).T output[np.where(x > 0)] = 1 return output def example_wise_unit_norm(x): original_shape = x.shape x = x.reshape((len(x), -1)) norms = np.linalg.norm(x, axis=-1, keepdims=True) normed = np.divide(x, norms, where=norms != 0) return normed.reshape(original_shape) ## Instruction: Add an option to also return intermediate example norms ## Code After: import numpy as np def hyperplanes(means, stds, n_planes): if len(means) != len(stds): raise ValueError('means and stds must have the same length') n_features = len(means) a = np.random.normal(means, stds, (n_planes, n_features)) b = np.random.normal(means, stds, (n_planes, n_features)) plane_vectors = a - b return plane_vectors def simhash(plane_vectors, data): output = np.zeros((len(data), len(plane_vectors)), dtype=np.uint8) flattened = data.reshape((len(data), -1)) x = np.dot(plane_vectors, flattened.T).T output[np.where(x > 0)] = 1 return output def example_wise_unit_norm(x, return_norms=False): original_shape = x.shape # flatten all dimensions of x, treating the first axis as examples and all # other axes as features x = x.reshape((len(x), -1)) norms = np.linalg.norm(x, axis=-1, keepdims=True) normed = np.divide(x, norms, where=norms != 0) normed = normed.reshape(original_shape) if return_norms: return normed, norms else: return normed
# ... existing code ... def example_wise_unit_norm(x, return_norms=False): original_shape = x.shape # flatten all dimensions of x, treating the first axis as examples and all # other axes as features x = x.reshape((len(x), -1)) # ... modified code ... normed = np.divide(x, norms, where=norms != 0) normed = normed.reshape(original_shape) if return_norms: return normed, norms else: return normed # ... rest of the code ...
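One numpy subtlety sits behind this record: np.divide(..., where=...) leaves the masked-out positions of a freshly allocated result uninitialized unless an explicit out= array is supplied, so a sketch of the same normalisation is safest written with one. keepdims=True is what lets the (n, 1) norms broadcast back across the features; the values below are made up:

import numpy as np

x = np.array([[3.0, 4.0],
              [0.0, 0.0]])                         # second example has norm 0
norms = np.linalg.norm(x, axis=-1, keepdims=True)  # shape (2, 1)
normed = np.divide(x, norms, out=np.zeros_like(x), where=norms != 0)
print(normed)         # [[0.6 0.8]
                      #  [0.  0. ]]
print(norms.ravel())  # [5. 0.]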
16b3dc1f8c762a751e1476d679391f3bbc82cd5d
python-prefix.py
python-prefix.py
import sys import os.path import site def main(): '''\ Check if the given prefix is included in sys.path for the given python version; if not find an alternate valid prefix. Print the result to standard out. ''' if len(sys.argv) != 3: msg = 'usage: %s <prefix> <python version>\n' % \ os.path.basename(sys.argv[0]) sys.stderr.write(msg) return 1 python_prefix = sys.argv[1] python_version = sys.argv[2] path = '%s/lib/python%s' % (python_prefix, python_version) path = os.path.normpath(path) if path[-1] != '/': path = path + '/' prefix = None for p in sys.path: if p.startswith(path): prefix = path break if not prefix: prefix = site.PREFIXES[-1] sys.stdout.write('%s\n' % prefix) return 0 if __name__ == '__main__': sys.exit(main())
import sys import os.path import site def main(): '''\ Check if the given prefix is included in sys.path for the given python version; if not find an alternate valid prefix. Print the result to standard out. ''' if len(sys.argv) != 3: msg = 'usage: %s <prefix> <python version>\n' % \ os.path.basename(sys.argv[0]) sys.stderr.write(msg) return 1 python_prefix = sys.argv[1] python_version = sys.argv[2] path = '%s/lib/python%s' % (python_prefix, python_version) path = os.path.normpath(path) if path[-1] != '/': path = path + '/' prefix = None for p in sys.path: if p.startswith(path): prefix = python_prefix break if not prefix: prefix = site.PREFIXES[-1] sys.stdout.write('%s\n' % prefix) return 0 if __name__ == '__main__': sys.exit(main())
Fix typo in previous commit.
Fix typo in previous commit.
Python
bsd-2-clause
marek-sezemsky/coreemu,tectronics/coreemu,marek-sezemsky/coreemu,guidotack/coreemu,guidotack/coreemu,tectronics/coreemu,tectronics/coreemu,gregtampa/coreemu,guidotack/coreemu,gregtampa/coreemu,marek-sezemsky/coreemu,gregtampa/coreemu
import sys import os.path import site def main(): '''\ Check if the given prefix is included in sys.path for the given python version; if not find an alternate valid prefix. Print the result to standard out. ''' if len(sys.argv) != 3: msg = 'usage: %s <prefix> <python version>\n' % \ os.path.basename(sys.argv[0]) sys.stderr.write(msg) return 1 python_prefix = sys.argv[1] python_version = sys.argv[2] path = '%s/lib/python%s' % (python_prefix, python_version) path = os.path.normpath(path) if path[-1] != '/': path = path + '/' prefix = None for p in sys.path: if p.startswith(path): - prefix = path + prefix = python_prefix break if not prefix: prefix = site.PREFIXES[-1] sys.stdout.write('%s\n' % prefix) return 0 if __name__ == '__main__': sys.exit(main())
Fix typo in previous commit.
## Code Before: import sys import os.path import site def main(): '''\ Check if the given prefix is included in sys.path for the given python version; if not find an alternate valid prefix. Print the result to standard out. ''' if len(sys.argv) != 3: msg = 'usage: %s <prefix> <python version>\n' % \ os.path.basename(sys.argv[0]) sys.stderr.write(msg) return 1 python_prefix = sys.argv[1] python_version = sys.argv[2] path = '%s/lib/python%s' % (python_prefix, python_version) path = os.path.normpath(path) if path[-1] != '/': path = path + '/' prefix = None for p in sys.path: if p.startswith(path): prefix = path break if not prefix: prefix = site.PREFIXES[-1] sys.stdout.write('%s\n' % prefix) return 0 if __name__ == '__main__': sys.exit(main()) ## Instruction: Fix typo in previous commit. ## Code After: import sys import os.path import site def main(): '''\ Check if the given prefix is included in sys.path for the given python version; if not find an alternate valid prefix. Print the result to standard out. ''' if len(sys.argv) != 3: msg = 'usage: %s <prefix> <python version>\n' % \ os.path.basename(sys.argv[0]) sys.stderr.write(msg) return 1 python_prefix = sys.argv[1] python_version = sys.argv[2] path = '%s/lib/python%s' % (python_prefix, python_version) path = os.path.normpath(path) if path[-1] != '/': path = path + '/' prefix = None for p in sys.path: if p.startswith(path): prefix = python_prefix break if not prefix: prefix = site.PREFIXES[-1] sys.stdout.write('%s\n' % prefix) return 0 if __name__ == '__main__': sys.exit(main())
# ... existing code ... if p.startswith(path): prefix = python_prefix break # ... rest of the code ...
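The trailing-slash step in this record's helper is not cosmetic: it stops a sibling directory whose name merely extends the prefix from passing the startswith() test. A small demonstration with made-up paths:

import os.path

path = os.path.normpath('/usr/local/lib/python2.7')
if not path.endswith('/'):
    path += '/'

# Without the trailing slash both of these would match:
print('/usr/local/lib/python2.7-extras/x'.startswith(path))       # False
print('/usr/local/lib/python2.7/site-packages'.startswith(path))  # True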
f4dfcf91c11fd06b5b71135f888b6979548a5147
conveyor/__main__.py
conveyor/__main__.py
from __future__ import absolute_import from .core import Conveyor def main(): Conveyor().run() if __name__ == "__main__": main()
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from .core import Conveyor def main(): Conveyor().run() if __name__ == "__main__": main()
Bring the standard __future__ imports over
Bring the standard __future__ imports over
Python
bsd-2-clause
crateio/carrier
from __future__ import absolute_import + from __future__ import division + from __future__ import unicode_literals from .core import Conveyor def main(): Conveyor().run() + if __name__ == "__main__": main()
Bring the standard __future__ imports over
## Code Before: from __future__ import absolute_import from .core import Conveyor def main(): Conveyor().run() if __name__ == "__main__": main() ## Instruction: Bring the standard __future__ imports over ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from .core import Conveyor def main(): Conveyor().run() if __name__ == "__main__": main()
// ... existing code ... from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals // ... modified code ... if __name__ == "__main__": // ... rest of the code ...
f0af944db962bdb8ea764737860ce9168f779977
perfkitbenchmarker/linux_packages/azure_credentials.py
perfkitbenchmarker/linux_packages/azure_credentials.py
"""Package for installing the Azure credentials.""" import os from perfkitbenchmarker import object_storage_service AZURE_CREDENTIAL_LOCATION = '.azure' AZURE_CREDENTIAL_TOKENS_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'accessTokens.json') AZURE_CREDENTIAL_PROFILE_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'azureProfile.json') def Install(vm): """Copies Azure credentials to the VM.""" vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), AZURE_CREDENTIAL_LOCATION) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), AZURE_CREDENTIAL_LOCATION)
"""Package for installing the Azure credentials.""" import os from perfkitbenchmarker import object_storage_service AZURE_CREDENTIAL_LOCATION = '.azure' AZURE_CREDENTIAL_TOKENS_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'accessTokens.json') AZURE_CREDENTIAL_PROFILE_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'azureProfile.json') def Install(vm): """Copies Azure credentials to the VM.""" vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION)) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), AZURE_CREDENTIAL_TOKENS_FILE) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), AZURE_CREDENTIAL_PROFILE_FILE)
Fix a bug in the Azure credentials package in which they would overwrite the directory.
Fix a bug in the Azure credentials package in which they would overwrite the directory. ------------- Created by MOE: https://github.com/google/moe MOE_MIGRATED_REVID=248750675
Python
apache-2.0
GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker,GoogleCloudPlatform/PerfKitBenchmarker
"""Package for installing the Azure credentials.""" import os from perfkitbenchmarker import object_storage_service AZURE_CREDENTIAL_LOCATION = '.azure' AZURE_CREDENTIAL_TOKENS_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'accessTokens.json') AZURE_CREDENTIAL_PROFILE_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'azureProfile.json') def Install(vm): """Copies Azure credentials to the VM.""" + vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION)) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), - AZURE_CREDENTIAL_LOCATION) + AZURE_CREDENTIAL_TOKENS_FILE) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), - AZURE_CREDENTIAL_LOCATION) + AZURE_CREDENTIAL_PROFILE_FILE)
Fix a bug in the Azure credentials package in which they would overwrite the directory.
## Code Before: """Package for installing the Azure credentials.""" import os from perfkitbenchmarker import object_storage_service AZURE_CREDENTIAL_LOCATION = '.azure' AZURE_CREDENTIAL_TOKENS_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'accessTokens.json') AZURE_CREDENTIAL_PROFILE_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'azureProfile.json') def Install(vm): """Copies Azure credentials to the VM.""" vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), AZURE_CREDENTIAL_LOCATION) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), AZURE_CREDENTIAL_LOCATION) ## Instruction: Fix a bug in the Azure credentials package in which they would overwrite the directory. ## Code After: """Package for installing the Azure credentials.""" import os from perfkitbenchmarker import object_storage_service AZURE_CREDENTIAL_LOCATION = '.azure' AZURE_CREDENTIAL_TOKENS_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'accessTokens.json') AZURE_CREDENTIAL_PROFILE_FILE = os.path.join( AZURE_CREDENTIAL_LOCATION, 'azureProfile.json') def Install(vm): """Copies Azure credentials to the VM.""" vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION)) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), AZURE_CREDENTIAL_TOKENS_FILE) vm.PushFile( object_storage_service.FindCredentialFile( os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), AZURE_CREDENTIAL_PROFILE_FILE)
// ... existing code ... """Copies Azure credentials to the VM.""" vm.RemoteCommand('mkdir -p {0}'.format(AZURE_CREDENTIAL_LOCATION)) vm.PushFile( // ... modified code ... os.path.join('~', AZURE_CREDENTIAL_TOKENS_FILE)), AZURE_CREDENTIAL_TOKENS_FILE) vm.PushFile( ... os.path.join('~', AZURE_CREDENTIAL_PROFILE_FILE)), AZURE_CREDENTIAL_PROFILE_FILE) // ... rest of the code ...
15ebd5a3509b20bad4cf0123dfac9be6878fa91c
app/models/bookmarks.py
app/models/bookmarks.py
from flask import current_app from .. import db, login_manager class Bookmarks(db.Model): id = db.Column(db.Integer, primary_key=True) listing_id = db.Column(db.Integer, unique=True) merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) #also needs to be added to Hunter's Vendors model def __init__(self, listing_id, merchant_id): self.listing_id = listing_id self.merchant_id = merchant_id def __repr__(self): return "<User: {} Bookmarked Listing: {}".format(self.merchant_id, self.listing_id)
from flask import current_app from .. import db, login_manager class Bookmarks(db.Model): id = db.Column(db.Integer, primary_key=True) listing_id = db.Column(db.Integer, unique=True) merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) merchant = db.relationship('User', backref=db.backref('bookmarks', lazy='dynamic')) def __init__(self, listing_id, merchant): self.listing_id = listing_id self.merchant = merchant def __repr__(self): return "<User: {} Bookmarked Listing: {}".format(self.merchant_id, self.listing_id)
Set up the relationship between bookmark and merchant
Set up the relationship between bookmark and merchant
Python
mit
hack4impact/reading-terminal-market,hack4impact/reading-terminal-market,hack4impact/reading-terminal-market
from flask import current_app from .. import db, login_manager class Bookmarks(db.Model): id = db.Column(db.Integer, primary_key=True) listing_id = db.Column(db.Integer, unique=True) - merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) #also needs to be added to Hunter's Vendors model + merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) + merchant = db.relationship('User', backref=db.backref('bookmarks', lazy='dynamic')) - def __init__(self, listing_id, merchant_id): + def __init__(self, listing_id, merchant): self.listing_id = listing_id - self.merchant_id = merchant_id + self.merchant = merchant def __repr__(self): return "<User: {} Bookmarked Listing: {}".format(self.merchant_id, self.listing_id)
Set up the relationship between bookmark and merchant
## Code Before: from flask import current_app from .. import db, login_manager class Bookmarks(db.Model): id = db.Column(db.Integer, primary_key=True) listing_id = db.Column(db.Integer, unique=True) merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) #also needs to be added to Hunter's Vendors model def __init__(self, listing_id, merchant_id): self.listing_id = listing_id self.merchant_id = merchant_id def __repr__(self): return "<User: {} Bookmarked Listing: {}".format(self.merchant_id, self.listing_id) ## Instruction: Set up the relationship between bookmark and merchant ## Code After: from flask import current_app from .. import db, login_manager class Bookmarks(db.Model): id = db.Column(db.Integer, primary_key=True) listing_id = db.Column(db.Integer, unique=True) merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) merchant = db.relationship('User', backref=db.backref('bookmarks', lazy='dynamic')) def __init__(self, listing_id, merchant): self.listing_id = listing_id self.merchant = merchant def __repr__(self): return "<User: {} Bookmarked Listing: {}".format(self.merchant_id, self.listing_id)
... listing_id = db.Column(db.Integer, unique=True) merchant_id = db.Column(db.Integer, db.ForeignKey('user.id')) merchant = db.relationship('User', backref=db.backref('bookmarks', lazy='dynamic')) def __init__(self, listing_id, merchant): self.listing_id = listing_id self.merchant = merchant ...
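The relationship/backref pair in this record does two jobs at once: Bookmarks.merchant resolves the foreign key to a User instance, and the backref hangs a reverse accessor on User whose lazy='dynamic' makes it a filterable query rather than a plain list. A self-contained Flask-SQLAlchemy sketch of those semantics; the app wiring, the in-memory database and the trimmed model fields are all stand-ins:

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
db = SQLAlchemy(app)

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)

class Bookmark(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    listing_id = db.Column(db.Integer)
    merchant_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    merchant = db.relationship(
        'User', backref=db.backref('bookmarks', lazy='dynamic'))

with app.app_context():
    db.create_all()
    u = User()
    db.session.add_all([u, Bookmark(listing_id=42, merchant=u)])
    db.session.commit()
    # lazy='dynamic' yields a query object, so it can be filtered:
    print(u.bookmarks.filter_by(listing_id=42).count())  # 1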
d2250ac74b0797d1662c054d2357573578caa251
core/tasks.py
core/tasks.py
import os import gzip import urllib.request from celery import shared_task from django.core.mail import EmailMessage from celery.task import periodic_task from celery.schedules import crontab @shared_task(name='deliver_email') def deliver_email(subject=None, body=None, recipients=None): #print("Entering core.tasks.deliver_email for ...", recipients) if recipients: for recipient in recipients: #print("sending email to recipient: ", recipient) email = EmailMessage(subject, body, to=[recipient]) email.send() @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) def update_geolocation(self): # Establish desired paths and directories current_directory = os.path.dirname(__file__) compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz') uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb') # Pull down current database file url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" urllib.request.urlretrieve(url, compressed_filepath) # Read and unzip compressed file to current directory zipped = gzip.open(compressed_filepath, "rb") uncompressed = open(uncompressed_filepath, "wb") uncompressed.write(zipped.read()) zipped.close() uncompressed.close() # Remove zipped file os.remove(compressed_filepath)
import os import gzip import urllib.request from celery import shared_task from django.core.mail import EmailMessage from celery.task import periodic_task from celery.schedules import crontab @shared_task(name='deliver_email') def deliver_email(subject=None, body=None, recipients=None): if recipients: for recipient in recipients: email = EmailMessage(subject, body, to=[recipient]) email.send() @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) def update_geolocation(self): # Establish desired paths and directories current_directory = os.path.dirname(__file__) compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz') uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb') # Pull down current database file url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" urllib.request.urlretrieve(url, compressed_filepath) # Read and unzip compressed file to current directory zipped = gzip.open(compressed_filepath, "rb") uncompressed = open(uncompressed_filepath, "wb") uncompressed.write(zipped.read()) zipped.close() uncompressed.close() # Remove zipped file os.remove(compressed_filepath)
Clean up code and remove print statements
Clean up code and remove print statements
Python
mit
LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID
import os import gzip import urllib.request from celery import shared_task from django.core.mail import EmailMessage from celery.task import periodic_task from celery.schedules import crontab @shared_task(name='deliver_email') def deliver_email(subject=None, body=None, recipients=None): - #print("Entering core.tasks.deliver_email for ...", recipients) if recipients: for recipient in recipients: - #print("sending email to recipient: ", recipient) email = EmailMessage(subject, body, to=[recipient]) email.send() + @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) def update_geolocation(self): + # Establish desired paths and directories current_directory = os.path.dirname(__file__) compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz') uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb') # Pull down current database file url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" urllib.request.urlretrieve(url, compressed_filepath) # Read and unzip compressed file to current directory zipped = gzip.open(compressed_filepath, "rb") uncompressed = open(uncompressed_filepath, "wb") uncompressed.write(zipped.read()) zipped.close() uncompressed.close() # Remove zipped file os.remove(compressed_filepath) -
Clean up code and remove print statements
## Code Before: import os import gzip import urllib.request from celery import shared_task from django.core.mail import EmailMessage from celery.task import periodic_task from celery.schedules import crontab @shared_task(name='deliver_email') def deliver_email(subject=None, body=None, recipients=None): #print("Entering core.tasks.deliver_email for ...", recipients) if recipients: for recipient in recipients: #print("sending email to recipient: ", recipient) email = EmailMessage(subject, body, to=[recipient]) email.send() @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) def update_geolocation(self): # Establish desired paths and directories current_directory = os.path.dirname(__file__) compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz') uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb') # Pull down current database file url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" urllib.request.urlretrieve(url, compressed_filepath) # Read and unzip compressed file to current directory zipped = gzip.open(compressed_filepath, "rb") uncompressed = open(uncompressed_filepath, "wb") uncompressed.write(zipped.read()) zipped.close() uncompressed.close() # Remove zipped file os.remove(compressed_filepath) ## Instruction: Clean up code and remove print statements ## Code After: import os import gzip import urllib.request from celery import shared_task from django.core.mail import EmailMessage from celery.task import periodic_task from celery.schedules import crontab @shared_task(name='deliver_email') def deliver_email(subject=None, body=None, recipients=None): if recipients: for recipient in recipients: email = EmailMessage(subject, body, to=[recipient]) email.send() @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) def update_geolocation(self): # Establish desired paths and directories current_directory = os.path.dirname(__file__) compressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb.gz') uncompressed_filepath = os.path.join(current_directory, 'GeoLite2-City.mmdb') # Pull down current database file url = "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.mmdb.gz" urllib.request.urlretrieve(url, compressed_filepath) # Read and unzip compressed file to current directory zipped = gzip.open(compressed_filepath, "rb") uncompressed = open(uncompressed_filepath, "wb") uncompressed.write(zipped.read()) zipped.close() uncompressed.close() # Remove zipped file os.remove(compressed_filepath)
# ... existing code ... def deliver_email(subject=None, body=None, recipients=None): # ... modified code ... for recipient in recipients: email = EmailMessage(subject, body, to=[recipient]) ... @periodic_task(bind=True, run_every=crontab(0, 0, day_of_month='7')) ... def update_geolocation(self): # Establish desired paths and directories # ... rest of the code ...
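The download-and-decompress chore in this record has a streaming idiom worth spelling out: shutil.copyfileobj moves the data in chunks instead of reading the whole decompressed file into memory, and context managers close both handles even on error. A sketch; the URL is a placeholder and will not resolve as written:

import gzip
import os
import shutil
import urllib.request

url = 'http://example.invalid/GeoLite2-City.mmdb.gz'  # placeholder URL
urllib.request.urlretrieve(url, 'GeoLite2-City.mmdb.gz')

with gzip.open('GeoLite2-City.mmdb.gz', 'rb') as zipped, \
        open('GeoLite2-City.mmdb', 'wb') as out:
    shutil.copyfileobj(zipped, out)

os.remove('GeoLite2-City.mmdb.gz')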
1934229ace3bd35b98e3eaa9b8ec75a1000dea78
djkombu/transport.py
djkombu/transport.py
from Queue import Empty from anyjson import serialize, deserialize from kombu.transport import virtual from django.conf import settings from django.core import exceptions as errors from djkombu.models import Queue POLLING_INTERVAL = getattr(settings, "DJKOMBU_POLLING_INTERVAL", 5.0) class Channel(virtual.Channel): def _new_queue(self, queue, **kwargs): Queue.objects.get_or_create(name=queue) def _put(self, queue, message, **kwargs): Queue.objects.publish(queue, serialize(message)) def basic_consume(self, queue, *args, **kwargs): exchange, _ , _ = self.state.bindings[queue] if self.typeof(exchange).type == "fanout": return super(Channel, self).basic_consume(queue, *args, **kwargs) def _get(self, queue): #self.refresh_connection() m = Queue.objects.fetch(queue) if m: return deserialize(m) raise Empty() def _size(self, queue): return Queue.objects.size(queue) def _purge(self, queue): return Queue.objects.purge(queue) def refresh_connection(self): from django import db db.close_connection() class DatabaseTransport(virtual.Transport): Channel = Channel default_port = 0 polling_interval = POLLING_INTERVAL connection_errors = () channel_errors = (errors.ObjectDoesNotExist, errors.MultipleObjectsReturned)
from Queue import Empty from anyjson import serialize, deserialize from kombu.transport import virtual from django.conf import settings from django.core import exceptions as errors from djkombu.models import Queue POLLING_INTERVAL = getattr(settings, "DJKOMBU_POLLING_INTERVAL", 5.0) class Channel(virtual.Channel): def _new_queue(self, queue, **kwargs): Queue.objects.get_or_create(name=queue) def _put(self, queue, message, **kwargs): Queue.objects.publish(queue, serialize(message)) def basic_consume(self, queue, *args, **kwargs): qinfo = self.state.bindings[queue] exchange = qinfo[0] if self.typeof(exchange).type == "fanout": return super(Channel, self).basic_consume(queue, *args, **kwargs) def _get(self, queue): #self.refresh_connection() m = Queue.objects.fetch(queue) if m: return deserialize(m) raise Empty() def _size(self, queue): return Queue.objects.size(queue) def _purge(self, queue): return Queue.objects.purge(queue) def refresh_connection(self): from django import db db.close_connection() class DatabaseTransport(virtual.Transport): Channel = Channel default_port = 0 polling_interval = POLLING_INTERVAL connection_errors = () channel_errors = (errors.ObjectDoesNotExist, errors.MultipleObjectsReturned)
Work with new and *older* kombu versions
Work with new and *older* kombu versions
Python
bsd-3-clause
ask/django-kombu
from Queue import Empty from anyjson import serialize, deserialize from kombu.transport import virtual from django.conf import settings from django.core import exceptions as errors from djkombu.models import Queue POLLING_INTERVAL = getattr(settings, "DJKOMBU_POLLING_INTERVAL", 5.0) class Channel(virtual.Channel): def _new_queue(self, queue, **kwargs): Queue.objects.get_or_create(name=queue) def _put(self, queue, message, **kwargs): Queue.objects.publish(queue, serialize(message)) def basic_consume(self, queue, *args, **kwargs): - exchange, _ , _ = self.state.bindings[queue] + qinfo = self.state.bindings[queue] + exchange = qinfo[0] if self.typeof(exchange).type == "fanout": return super(Channel, self).basic_consume(queue, *args, **kwargs) def _get(self, queue): #self.refresh_connection() m = Queue.objects.fetch(queue) if m: return deserialize(m) raise Empty() def _size(self, queue): return Queue.objects.size(queue) def _purge(self, queue): return Queue.objects.purge(queue) def refresh_connection(self): from django import db db.close_connection() class DatabaseTransport(virtual.Transport): Channel = Channel default_port = 0 polling_interval = POLLING_INTERVAL connection_errors = () channel_errors = (errors.ObjectDoesNotExist, errors.MultipleObjectsReturned)
Work with new and *older* kombu versions
## Code Before: from Queue import Empty from anyjson import serialize, deserialize from kombu.transport import virtual from django.conf import settings from django.core import exceptions as errors from djkombu.models import Queue POLLING_INTERVAL = getattr(settings, "DJKOMBU_POLLING_INTERVAL", 5.0) class Channel(virtual.Channel): def _new_queue(self, queue, **kwargs): Queue.objects.get_or_create(name=queue) def _put(self, queue, message, **kwargs): Queue.objects.publish(queue, serialize(message)) def basic_consume(self, queue, *args, **kwargs): exchange, _ , _ = self.state.bindings[queue] if self.typeof(exchange).type == "fanout": return super(Channel, self).basic_consume(queue, *args, **kwargs) def _get(self, queue): #self.refresh_connection() m = Queue.objects.fetch(queue) if m: return deserialize(m) raise Empty() def _size(self, queue): return Queue.objects.size(queue) def _purge(self, queue): return Queue.objects.purge(queue) def refresh_connection(self): from django import db db.close_connection() class DatabaseTransport(virtual.Transport): Channel = Channel default_port = 0 polling_interval = POLLING_INTERVAL connection_errors = () channel_errors = (errors.ObjectDoesNotExist, errors.MultipleObjectsReturned) ## Instruction: Work with new and *older* kombu versions ## Code After: from Queue import Empty from anyjson import serialize, deserialize from kombu.transport import virtual from django.conf import settings from django.core import exceptions as errors from djkombu.models import Queue POLLING_INTERVAL = getattr(settings, "DJKOMBU_POLLING_INTERVAL", 5.0) class Channel(virtual.Channel): def _new_queue(self, queue, **kwargs): Queue.objects.get_or_create(name=queue) def _put(self, queue, message, **kwargs): Queue.objects.publish(queue, serialize(message)) def basic_consume(self, queue, *args, **kwargs): qinfo = self.state.bindings[queue] exchange = qinfo[0] if self.typeof(exchange).type == "fanout": return super(Channel, self).basic_consume(queue, *args, **kwargs) def _get(self, queue): #self.refresh_connection() m = Queue.objects.fetch(queue) if m: return deserialize(m) raise Empty() def _size(self, queue): return Queue.objects.size(queue) def _purge(self, queue): return Queue.objects.purge(queue) def refresh_connection(self): from django import db db.close_connection() class DatabaseTransport(virtual.Transport): Channel = Channel default_port = 0 polling_interval = POLLING_INTERVAL connection_errors = () channel_errors = (errors.ObjectDoesNotExist, errors.MultipleObjectsReturned)
# ... existing code ... def basic_consume(self, queue, *args, **kwargs): qinfo = self.state.bindings[queue] exchange = qinfo[0] if self.typeof(exchange).type == "fanout": # ... rest of the code ...
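The one-line fix in this record, indexing instead of unpacking, is the standard way to stay compatible when a library changes the arity of a tuple it hands back; unpacking pins the expected length, indexing tolerates extras. In miniature, with invented tuple shapes:

binding_v1 = ('exchange', 'routing_key')                 # older shape
binding_v2 = ('exchange', 'routing_key', 'extra_field')  # newer shape

# exchange, _ = binding_v2   # would raise: too many values to unpack

for binding in (binding_v1, binding_v2):
    exchange = binding[0]    # works for both shapes
    print(exchange)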