commit
stringlengths 40
40
| old_file
stringlengths 4
106
| new_file
stringlengths 4
106
| old_contents
stringlengths 10
2.94k
| new_contents
stringlengths 21
2.95k
| subject
stringlengths 16
444
| message
stringlengths 17
2.63k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 7
43k
| ndiff
stringlengths 52
3.31k
| instruction
stringlengths 16
444
| content
stringlengths 133
4.32k
| diff
stringlengths 49
3.61k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ca042edc7f9709f2217b669fb5a68e9aac3ab61c
|
cbv/management/commands/cbv_dumpversion.py
|
cbv/management/commands/cbv_dumpversion.py
|
from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
|
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
Allow dumpdata of specific version of cbv.
|
Allow dumpdata of specific version of cbv.
|
Python
|
bsd-2-clause
|
abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector
|
+ import json
+
+ from django.db.models.query import QuerySet
from django.core.management import call_command
- from django.core.management.commands import LabelCommand
+ from django.core.management.base import LabelCommand
+ from django.core import serializers
+ from cbv import models
class Command(LabelCommand):
+ """Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
- # Because django will use the default manager of each model, we
- # monkeypatch the manager to filter by our label before calling
- # the dumpdata command to dump only the subset of data we want.
+ filtered_models = {
+ models.ProjectVersion: 'version_number',
+ models.Module: 'project_version__version_number',
+ models.ModuleAttribute: 'module__project_version__version_number',
+ models.Function: 'module__project_version__version_number',
+ models.Klass: 'module__project_version__version_number',
+ models.KlassAttribute: 'klass__module__project_version__version_number',
+ models.Method: 'klass__module__project_version__version_number',
+ }
+ objects = []
+ for model, version_arg in filtered_models.items():
+ filter_kwargs = {version_arg: label}
+ result = model.objects.filter(**filter_kwargs)
+ objects = objects + list(result)
+ for obj in objects:
+ obj.pk = None
+ dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
+ self.stdout.write(dump)
- # Set the
-
- # Call the dumpdata command.
- call_command('dumpdata', 'cbv')
-
|
Allow dumpdata of specific version of cbv.
|
## Code Before:
from django.core.management import call_command
from django.core.management.commands import LabelCommand
class Command(LabelCommand):
def handle_label(self, label, **options):
# Because django will use the default manager of each model, we
# monkeypatch the manager to filter by our label before calling
# the dumpdata command to dump only the subset of data we want.
# Set the
# Call the dumpdata command.
call_command('dumpdata', 'cbv')
## Instruction:
Allow dumpdata of specific version of cbv.
## Code After:
import json
from django.db.models.query import QuerySet
from django.core.management import call_command
from django.core.management.base import LabelCommand
from django.core import serializers
from cbv import models
class Command(LabelCommand):
"""Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
filtered_models = {
models.ProjectVersion: 'version_number',
models.Module: 'project_version__version_number',
models.ModuleAttribute: 'module__project_version__version_number',
models.Function: 'module__project_version__version_number',
models.Klass: 'module__project_version__version_number',
models.KlassAttribute: 'klass__module__project_version__version_number',
models.Method: 'klass__module__project_version__version_number',
}
objects = []
for model, version_arg in filtered_models.items():
filter_kwargs = {version_arg: label}
result = model.objects.filter(**filter_kwargs)
objects = objects + list(result)
for obj in objects:
obj.pk = None
dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
self.stdout.write(dump)
|
+ import json
+
+ from django.db.models.query import QuerySet
from django.core.management import call_command
- from django.core.management.commands import LabelCommand
? ^^^^ --
+ from django.core.management.base import LabelCommand
? ^ +
+ from django.core import serializers
+ from cbv import models
class Command(LabelCommand):
+ """Dump the django cbv app data for a specific version."""
def handle_label(self, label, **options):
- # Because django will use the default manager of each model, we
- # monkeypatch the manager to filter by our label before calling
- # the dumpdata command to dump only the subset of data we want.
-
- # Set the
-
- # Call the dumpdata command.
- call_command('dumpdata', 'cbv')
+ filtered_models = {
+ models.ProjectVersion: 'version_number',
+ models.Module: 'project_version__version_number',
+ models.ModuleAttribute: 'module__project_version__version_number',
+ models.Function: 'module__project_version__version_number',
+ models.Klass: 'module__project_version__version_number',
+ models.KlassAttribute: 'klass__module__project_version__version_number',
+ models.Method: 'klass__module__project_version__version_number',
+ }
+ objects = []
+ for model, version_arg in filtered_models.items():
+ filter_kwargs = {version_arg: label}
+ result = model.objects.filter(**filter_kwargs)
+ objects = objects + list(result)
+ for obj in objects:
+ obj.pk = None
+ dump = serializers.serialize('json', objects, indent=1, use_natural_keys=True)
+ self.stdout.write(dump)
|
a9844bad75c66e10f85be4555c9ad7aa2df15585
|
src/trajectory_server.py
|
src/trajectory_server.py
|
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
Remove import that was not used
|
Remove import that was not used
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
import rospy
- from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
+ from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
Remove import that was not used
|
## Code Before:
import rospy
from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
## Instruction:
Remove import that was not used
## Code After:
import rospy
from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
import rospy
- from trajectory_tracking.srv import TrajectoryPoint, TrajectoryPointResponse
? -------------------------
+ from trajectory_tracking.srv import TrajectoryPoint
from geometry_msgs.msg import Point
def compute_position(request):
t = request.t
position = Point()
position.x = 0.05 * t
position.y = 0.05 * t
position.z = 0.0
return position
if __name__ == '__main__':
rospy.init_node('trajectory_server')
service = rospy.Service('trajectory', TrajectoryPoint, compute_position)
rospy.spin()
|
9e7dc537d09555d9c77ff5e1f16f5577721910f9
|
runtests.py
|
runtests.py
|
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
Fix issue with old wagtail core paths
|
Fix issue with old wagtail core paths
|
Python
|
mit
|
Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget
|
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
- 'wagtail.wagtailcore',
+ 'wagtail.core',
- 'wagtail.wagtailsites',
+ 'wagtail.sites',
- 'wagtail.wagtailusers',
+ 'wagtail.users',
- 'wagtail.wagtailimages',
+ 'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
Fix issue with old wagtail core paths
|
## Code Before:
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.wagtailcore',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailimages',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
## Instruction:
Fix issue with old wagtail core paths
## Code After:
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
'wagtail.core',
'wagtail.sites',
'wagtail.users',
'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
params = dict(
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'wagtailgeowidget': {
'handlers': ['console'],
'level': 'ERROR',
'propagate': True,
},
},
},
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
INSTALLED_APPS=[
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sites',
- 'wagtail.wagtailcore',
? -------
+ 'wagtail.core',
- 'wagtail.wagtailsites',
? -------
+ 'wagtail.sites',
- 'wagtail.wagtailusers',
? -------
+ 'wagtail.users',
- 'wagtail.wagtailimages',
? -------
+ 'wagtail.images',
'taggit',
'wagtailgeowidget',
"tests",
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='tests.urls',
)
settings.configure(**params)
def runtests():
argv = sys.argv[:1] + ["test"] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == "__main__":
runtests()
|
7b0bd58c359f5ea21af907cb90234171a6cfca5c
|
photobox/photobox.py
|
photobox/photobox.py
|
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
cheesepicfolder = '/home/pi/cheesepics/'
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
cheesepicFolder = RealFolder(cheesepicfolder)
photofolder = Photofolder(filesystemFolder)
photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch)
photobox.start()
|
from cheesefolder import Cheesefolder
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
cheesepicpath = '/home/pi/cheesepics/'
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
cheesepicFolder = RealFolder(cheesepicpath)
cheesef = Cheesefolder(cheesepicFolder)
photofolder = Photofolder(filesystemFolder)
photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch, cheesef)
photobox.start()
|
Use the correct chesefolder objects
|
Use the correct chesefolder objects
|
Python
|
mit
|
MarkusAmshove/Photobox
|
+ from cheesefolder import Cheesefolder
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
- cheesepicfolder = '/home/pi/cheesepics/'
+ cheesepicpath = '/home/pi/cheesepics/'
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
- cheesepicFolder = RealFolder(cheesepicfolder)
+ cheesepicFolder = RealFolder(cheesepicpath)
+ cheesef = Cheesefolder(cheesepicFolder)
photofolder = Photofolder(filesystemFolder)
- photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch)
+ photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch, cheesef)
photobox.start()
|
Use the correct chesefolder objects
|
## Code Before:
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
cheesepicfolder = '/home/pi/cheesepics/'
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
cheesepicFolder = RealFolder(cheesepicfolder)
photofolder = Photofolder(filesystemFolder)
photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch)
photobox.start()
## Instruction:
Use the correct chesefolder objects
## Code After:
from cheesefolder import Cheesefolder
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
cheesepicpath = '/home/pi/cheesepics/'
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
cheesepicFolder = RealFolder(cheesepicpath)
cheesef = Cheesefolder(cheesepicFolder)
photofolder = Photofolder(filesystemFolder)
photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch, cheesef)
photobox.start()
|
+ from cheesefolder import Cheesefolder
from photofolder import Photofolder
from folder import RealFolder
from gphotocamera import Gphoto
from main import Photobox
from rcswitch import RCSwitch
##########
# config #
##########
photodirectory = '/var/www/html/'
- cheesepicfolder = '/home/pi/cheesepics/'
? ^^^^^^
+ cheesepicpath = '/home/pi/cheesepics/'
? ^^^^
windowwidth = 1024
windowheight = 768
camera = Gphoto()
switch = RCSwitch(2352753, 2352754, "NOT_IMPLEMENTED")
##########
filesystemFolder = RealFolder(photodirectory)
- cheesepicFolder = RealFolder(cheesepicfolder)
? ^^^^^^
+ cheesepicFolder = RealFolder(cheesepicpath)
? ^^^^
+ cheesef = Cheesefolder(cheesepicFolder)
photofolder = Photofolder(filesystemFolder)
- photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch)
+ photobox = Photobox((windowwidth, windowheight), photofolder, camera, switch, cheesef)
? +++++++++
photobox.start()
|
4e30a58386afb5b34bd83c8115c55e5d09b8f631
|
common/views.py
|
common/views.py
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
Improve performance by prefetching where needed
|
Improve performance by prefetching where needed
|
Python
|
agpl-3.0
|
Pajn/RAXA-Django,Pajn/RAXA-Django
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
- rooms = Plan.objects.filter(floor=floor)
+ rooms = Plan.objects.select_related('room__id').filter(floor=floor)
- furnitures = Furniture.objects.filter(floor=floor)
+ furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
Improve performance by prefetching where needed
|
## Code Before:
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.filter(floor=floor)
furnitures = Furniture.objects.filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
## Instruction:
Improve performance by prefetching where needed
## Code After:
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
rooms = Plan.objects.select_related('room__id').filter(floor=floor)
furnitures = Furniture.objects.select_related('device').filter(floor=floor)
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
from django.shortcuts import render
from common.models.Furniture import Furniture
from common.models.Plan import Plan
def overlay(request, floor=1):
edit_rooms = False
if request.method == 'POST':
if 'floor' in request.POST:
floor = request.POST['floor']
if 'edit_rooms' in request.POST:
edit_rooms = True
- rooms = Plan.objects.filter(floor=floor)
+ rooms = Plan.objects.select_related('room__id').filter(floor=floor)
? +++++++++++++++++++++++++++
- furnitures = Furniture.objects.filter(floor=floor)
+ furnitures = Furniture.objects.select_related('device').filter(floor=floor)
? +++++++++++++++++++++++++
radious=10
return render(request, 'common/floor.svg', {'rooms':rooms, 'furnitures':furnitures, 'radious':radious, 'edit_rooms':edit_rooms})
|
2ebb667b38b3d74003948347f411f177ca584834
|
boardinghouse/contrib/template/models.py
|
boardinghouse/contrib/template/models.py
|
from django.db import models
from django.utils import six
from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
verbose_name_plural = u'template schemata'
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
|
from django.db import models
from django.utils import six
from django.utils.functional import lazy
from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model
def verbose_name_plural():
return u'template {}'.format(get_schema_model()._meta.verbose_name_plural)
def verbose_name():
return u'template {}'.format(get_schema_model()._meta.verbose_name)
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
verbose_name = lazy(verbose_name, six.text_type)()
verbose_name_plural = lazy(verbose_name_plural, six.text_type)()
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
|
Use 'template ...' for the SchemaTemplate verbose_name*
|
Use 'template ...' for the SchemaTemplate verbose_name*
|
Python
|
bsd-3-clause
|
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
|
from django.db import models
from django.utils import six
+ from django.utils.functional import lazy
from boardinghouse.base import SharedSchemaMixin
- from boardinghouse.schema import activate_schema, deactivate_schema
+ from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model
+
+
+ def verbose_name_plural():
+ return u'template {}'.format(get_schema_model()._meta.verbose_name_plural)
+
+
+ def verbose_name():
+ return u'template {}'.format(get_schema_model()._meta.verbose_name)
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
- verbose_name_plural = u'template schemata'
+ verbose_name = lazy(verbose_name, six.text_type)()
+ verbose_name_plural = lazy(verbose_name_plural, six.text_type)()
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
|
Use 'template ...' for the SchemaTemplate verbose_name*
|
## Code Before:
from django.db import models
from django.utils import six
from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
verbose_name_plural = u'template schemata'
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
## Instruction:
Use 'template ...' for the SchemaTemplate verbose_name*
## Code After:
from django.db import models
from django.utils import six
from django.utils.functional import lazy
from boardinghouse.base import SharedSchemaMixin
from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model
def verbose_name_plural():
return u'template {}'.format(get_schema_model()._meta.verbose_name_plural)
def verbose_name():
return u'template {}'.format(get_schema_model()._meta.verbose_name)
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
verbose_name = lazy(verbose_name, six.text_type)()
verbose_name_plural = lazy(verbose_name_plural, six.text_type)()
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
|
from django.db import models
from django.utils import six
+ from django.utils.functional import lazy
from boardinghouse.base import SharedSchemaMixin
- from boardinghouse.schema import activate_schema, deactivate_schema
+ from boardinghouse.schema import activate_schema, deactivate_schema, get_schema_model
? ++++++++++++++++++
+
+
+ def verbose_name_plural():
+ return u'template {}'.format(get_schema_model()._meta.verbose_name_plural)
+
+
+ def verbose_name():
+ return u'template {}'.format(get_schema_model()._meta.verbose_name)
@six.python_2_unicode_compatible
class SchemaTemplate(SharedSchemaMixin, models.Model):
"""
A ``boardinghouse.contrib.template.models.SchemaTemplate`` can be used
for creating a new schema complete with some initial data.
"""
template_schema_id = models.AutoField(primary_key=True)
name = models.CharField(max_length=128, unique=True)
is_active = models.BooleanField(default=True)
description = models.TextField(null=True, blank=True)
class Meta:
default_permissions = ('add', 'change', 'delete', 'view', 'activate', 'clone')
- verbose_name_plural = u'template schemata'
+ verbose_name = lazy(verbose_name, six.text_type)()
+ verbose_name_plural = lazy(verbose_name_plural, six.text_type)()
def __str__(self):
return self.name
@property
def schema(self):
return '__template_{}'.format(self.pk)
def activate(self):
activate_schema(self.schema)
def deactivate(self):
deactivate_schema()
|
e3c1819b6b5ddec1ff326c3693d48ec8a8b3a834
|
fantail/tests/__init__.py
|
fantail/tests/__init__.py
|
tests_require = [
'pytest',
'pytest-capturelog',
'pytest-cov',
]
|
tests_require = [
'coveralls',
'pytest',
'pytest-capturelog',
'pytest-cov',
]
|
Add coveralls to test requirements
|
Add coveralls to test requirements
|
Python
|
bsd-2-clause
|
sjkingo/fantail,sjkingo/fantail,sjkingo/fantail
|
tests_require = [
+ 'coveralls',
'pytest',
'pytest-capturelog',
'pytest-cov',
]
|
Add coveralls to test requirements
|
## Code Before:
tests_require = [
'pytest',
'pytest-capturelog',
'pytest-cov',
]
## Instruction:
Add coveralls to test requirements
## Code After:
tests_require = [
'coveralls',
'pytest',
'pytest-capturelog',
'pytest-cov',
]
|
tests_require = [
+ 'coveralls',
'pytest',
'pytest-capturelog',
'pytest-cov',
]
|
76e436daef154bdf6acd1b0569f6fa2baa61addd
|
pyxform/tests_v1/test_audit.py
|
pyxform/tests_v1/test_audit.py
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
Add test for blank audit name.
|
Add test for blank audit name.
|
Python
|
bsd-2-clause
|
XLSForm/pyxform,XLSForm/pyxform
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
+
+ def test_audit_blank_name(self):
+ self.assertPyxformXform(
+ name="meta_audit",
+ md="""
+ | survey | | | |
+ | | type | name | label |
+ | | audit | | |
+ """,
+ xml__contains=[
+ '<meta>',
+ '<audit/>',
+ '</meta>',
+ '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
+ )
|
Add test for blank audit name.
|
## Code Before:
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
## Instruction:
Add test for blank audit name.
## Code After:
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_blank_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
def test_audit(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | audit | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
def test_audit_random_name(self):
self.assertPyxformXform(
name="meta_audit",
md="""
| survey | | | |
| | type | name | label |
| | audit | bobby | |
""",
xml__contains=[
'<meta>',
'<audit/>',
'</meta>',
'<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
)
+
+ def test_audit_blank_name(self):
+ self.assertPyxformXform(
+ name="meta_audit",
+ md="""
+ | survey | | | |
+ | | type | name | label |
+ | | audit | | |
+ """,
+ xml__contains=[
+ '<meta>',
+ '<audit/>',
+ '</meta>',
+ '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
+ )
|
979aada6964a5c8ef1f5c787ce84d72420626901
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
migrations/versions/36cbde703cc0_add_build_priority.py
|
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = 'fe743605e1a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
Update build priority down revision
|
Update build priority down revision
2c6662281b66
|
Python
|
apache-2.0
|
dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes
|
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
- down_revision = 'fe743605e1a'
+ down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
Update build priority down revision
|
## Code Before:
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = 'fe743605e1a'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
## Instruction:
Update build priority down revision
## Code After:
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
# revision identifiers, used by Alembic.
revision = '36cbde703cc0'
- down_revision = 'fe743605e1a'
+ down_revision = '2c6662281b66'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('build', sa.Column('priority', sa.Enum(), server_default='0', nullable=False))
def downgrade():
op.drop_column('build', 'priority')
|
837a0e822905fa8c4e0dda33a03f8423b2f9cdb1
|
nova/policies/hosts.py
|
nova/policies/hosts.py
|
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return hosts_policies
|
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""List, Show and Manage physical hosts.
These APIs are all deprecated in favor of os-hypervisors and os-services.""",
[
{
'method': 'GET',
'path': '/os-hosts'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}'
},
{
'method': 'PUT',
'path': '/os-hosts/{host_name}'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/reboot'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/shutdown'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/startup'
}
]),
]
def list_rules():
return hosts_policies
|
Add policy description for os-host
|
Add policy description for os-host
This commit adds policy doc for os-host policies.
Partial implement blueprint policy-docs
Change-Id: Ie15125f025dbb4982ff27cfed12047e8fce3a3cf
|
Python
|
apache-2.0
|
rahulunair/nova,mahak/nova,gooddata/openstack-nova,Juniper/nova,gooddata/openstack-nova,rahulunair/nova,klmitch/nova,phenoxim/nova,phenoxim/nova,openstack/nova,openstack/nova,Juniper/nova,rahulunair/nova,mikalstill/nova,mahak/nova,vmturbo/nova,vmturbo/nova,openstack/nova,klmitch/nova,vmturbo/nova,mikalstill/nova,klmitch/nova,jianghuaw/nova,vmturbo/nova,mahak/nova,mikalstill/nova,jianghuaw/nova,jianghuaw/nova,gooddata/openstack-nova,gooddata/openstack-nova,jianghuaw/nova,Juniper/nova,Juniper/nova,klmitch/nova
|
-
- from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
- policy.RuleDefault(
+ base.create_rule_default(
- name=BASE_POLICY_NAME,
+ BASE_POLICY_NAME,
- check_str=base.RULE_ADMIN_API),
+ base.RULE_ADMIN_API,
+ """List, Show and Manage physical hosts.
+
+ These APIs are all deprecated in favor of os-hypervisors and os-services.""",
+ [
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}'
+ },
+ {
+ 'method': 'PUT',
+ 'path': '/os-hosts/{host_name}'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/reboot'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/shutdown'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/startup'
+ }
+ ]),
]
def list_rules():
return hosts_policies
|
Add policy description for os-host
|
## Code Before:
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return hosts_policies
## Instruction:
Add policy description for os-host
## Code After:
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_API,
"""List, Show and Manage physical hosts.
These APIs are all deprecated in favor of os-hypervisors and os-services.""",
[
{
'method': 'GET',
'path': '/os-hosts'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}'
},
{
'method': 'PUT',
'path': '/os-hosts/{host_name}'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/reboot'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/shutdown'
},
{
'method': 'GET',
'path': '/os-hosts/{host_name}/startup'
}
]),
]
def list_rules():
return hosts_policies
|
-
- from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-hosts'
hosts_policies = [
- policy.RuleDefault(
+ base.create_rule_default(
- name=BASE_POLICY_NAME,
? -----
+ BASE_POLICY_NAME,
- check_str=base.RULE_ADMIN_API),
? ---------- -
+ base.RULE_ADMIN_API,
+ """List, Show and Manage physical hosts.
+
+ These APIs are all deprecated in favor of os-hypervisors and os-services.""",
+ [
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}'
+ },
+ {
+ 'method': 'PUT',
+ 'path': '/os-hosts/{host_name}'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/reboot'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/shutdown'
+ },
+ {
+ 'method': 'GET',
+ 'path': '/os-hosts/{host_name}/startup'
+ }
+ ]),
]
def list_rules():
return hosts_policies
|
7ed3a8452de8d75a09d2ee2265d7fa32b4a25c7c
|
pelicanconf.py
|
pelicanconf.py
|
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
BIO = 'lorem ipsum doler umpalum paluuu'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
|
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
BIO = 'PhD student. Data scientist. Iron Man fan.'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
STATIC_PATHS = ['images', 'extra/CNAME']
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
|
Add publication date on github publish
|
Add publication date on github publish
|
Python
|
mit
|
jagmoreira/jagmoreira.github.io,jagmoreira/jagmoreira.github.io
|
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
- BIO = 'lorem ipsum doler umpalum paluuu'
+ BIO = 'PhD student. Data scientist. Iron Man fan.'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
+ STATIC_PATHS = ['images', 'extra/CNAME']
+ EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
|
Add publication date on github publish
|
## Code Before:
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
BIO = 'lorem ipsum doler umpalum paluuu'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
## Instruction:
Add publication date on github publish
## Code After:
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
BIO = 'PhD student. Data scientist. Iron Man fan.'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
STATIC_PATHS = ['images', 'extra/CNAME']
EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
|
from __future__ import unicode_literals
AUTHOR = 'Joao Moreira'
SITENAME = 'Joao Moreira'
SITEURL = ''
- BIO = 'lorem ipsum doler umpalum paluuu'
+ BIO = 'PhD student. Data scientist. Iron Man fan.'
PROFILE_IMAGE = "avatar.jpg"
PATH = 'content'
+ STATIC_PATHS = ['images', 'extra/CNAME']
+ EXTRA_PATH_METADATA = {'extra/CNAME': {'path': 'CNAME'},}
TIMEZONE = 'America/Chicago'
DEFAULT_LANG = 'en'
DEFAULT_DATE_FORMAT = '%B %-d, %Y'
THEME = "pelican-hyde"
DISPLAY_PAGES_ON_MENU = True
LOAD_CONTENT_CACHE = False
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Social widget
SOCIAL = (('github-square', 'https://github.com/jagmoreira'),
('linkedin', 'https://www.linkedin.com/in/joao-moreira'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
YEAR_ARCHIVE_SAVE_AS = 'posts/{date:%Y}/index.html'
|
94124a65b5aa540f9f997dfcdbd856207d011555
|
wafer/conf_registration/models.py
|
wafer/conf_registration/models.py
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
Make requirements optional. Add help text and fix display in admin form
|
Make requirements optional. Add help text and fix display in admin form
|
Python
|
isc
|
CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
+ def __unicode__(self):
+ return u'%s' % self.name
+
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
- ConferenceOptionGroup, related_name='members')
+ ConferenceOptionGroup, related_name='members',
+ help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
- ConferenceOptionGroup, related_name='enables')
+ ConferenceOptionGroup, related_name='enables',
+ help_text='Option groups that this relies on',
+ blank=True)
+
+ def __unicode__(self):
+ return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
Make requirements optional. Add help text and fix display in admin form
|
## Code Before:
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables')
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
## Instruction:
Make requirements optional. Add help text and fix display in admin form
## Code After:
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
def __unicode__(self):
return u'%s' % self.name
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
ConferenceOptionGroup, related_name='members',
help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
ConferenceOptionGroup, related_name='enables',
help_text='Option groups that this relies on',
blank=True)
def __unicode__(self):
return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
from django.contrib.auth.models import User
from django.db import models
class ConferenceOptionGroup(models.Model):
"""Used to manage relationships"""
name = models.CharField(max_length=255)
+ def __unicode__(self):
+ return u'%s' % self.name
+
class ConferenceOption(models.Model):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=12, decimal_places=2)
groups = models.ManyToManyField(
- ConferenceOptionGroup, related_name='members')
? ^
+ ConferenceOptionGroup, related_name='members',
? ^
+ help_text='Groups this option belongs to.')
requirements = models.ManyToManyField(
- ConferenceOptionGroup, related_name='enables')
? ^
+ ConferenceOptionGroup, related_name='enables',
? ^
+ help_text='Option groups that this relies on',
+ blank=True)
+
+ def __unicode__(self):
+ return u'%s (%.2f)' % (self.name, self.price)
class RegisteredAttendee(models.Model):
name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
items = models.ManyToManyField(
ConferenceOption, related_name='attendees')
registered_by = models.ForeignKey(
User, related_name='registerations')
|
d21547637222d6bb2c3c9d03eae771d033ec47f4
|
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
|
lms/djangoapps/api_manager/management/commands/migrate_orgdata.py
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
Python
|
agpl-3.0
|
edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform,edx-solutions/edx-platform
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
+
+ name = org.name
+ display_name = data.get('display_name', name)
+ contact_name = data.get('contact_name', None)
+ contact_email = data.get('email', None)
+ if contact_email is None:
+ contact_email = data.get('contact_email', None)
+ contact_phone = data.get('phone', None)
+ if contact_phone is None:
+ contact_phone = data.get('contact_phone', None)
+
migrated_org = Organization.objects.create(
- name=data['name'],
+ name=name,
- display_name=data['display_name'],
+ display_name=display_name,
- contact_name=data['contact_name'],
+ contact_name=contact_name,
- contact_email=data['contact_email'],
+ contact_email=contact_email,
- contact_phone=data['contact_phone']
+ contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
|
## Code Before:
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
migrated_org = Organization.objects.create(
name=data['name'],
display_name=data['display_name'],
contact_name=data['contact_name'],
contact_email=data['contact_email'],
contact_phone=data['contact_phone']
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
## Instruction:
Tweak to migration in order to accomodate old names for data fields and allow for if data fields were not present
## Code After:
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
name = org.name
display_name = data.get('display_name', name)
contact_name = data.get('contact_name', None)
contact_email = data.get('email', None)
if contact_email is None:
contact_email = data.get('contact_email', None)
contact_phone = data.get('phone', None)
if contact_phone is None:
contact_phone = data.get('contact_phone', None)
migrated_org = Organization.objects.create(
name=name,
display_name=display_name,
contact_name=contact_name,
contact_email=contact_email,
contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
import json
from django.contrib.auth.models import Group
from django.core.management.base import BaseCommand
from api_manager.models import GroupProfile, Organization
class Command(BaseCommand):
"""
Migrates legacy organization data and user relationships from older Group model approach to newer concrete Organization model
"""
def handle(self, *args, **options):
org_groups = GroupProfile.objects.filter(group_type='organization')
for org in org_groups:
data = json.loads(org.data)
+
+ name = org.name
+ display_name = data.get('display_name', name)
+ contact_name = data.get('contact_name', None)
+ contact_email = data.get('email', None)
+ if contact_email is None:
+ contact_email = data.get('contact_email', None)
+ contact_phone = data.get('phone', None)
+ if contact_phone is None:
+ contact_phone = data.get('contact_phone', None)
+
migrated_org = Organization.objects.create(
- name=data['name'],
? ------ --
+ name=name,
- display_name=data['display_name'],
? ------ --
+ display_name=display_name,
- contact_name=data['contact_name'],
? ------ --
+ contact_name=contact_name,
- contact_email=data['contact_email'],
? ------ --
+ contact_email=contact_email,
- contact_phone=data['contact_phone']
? ------ --
+ contact_phone=contact_phone
)
group = Group.objects.get(groupprofile=org.id)
users = group.user_set.all()
for user in users:
migrated_org.users.add(user)
linked_groups = group.grouprelationship.get_linked_group_relationships()
for linked_group in linked_groups:
if linked_group.to_group_relationship_id is not org.id: # Don't need to carry the symmetrical component
actual_group = Group.objects.get(id=linked_group.to_group_relationship_id)
migrated_org.groups.add(actual_group)
|
0bcecfdf33f42f85bb9a8e32e79686a41fb5226a
|
django_validator/exceptions.py
|
django_validator/exceptions.py
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
|
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
Fix Validation import error in older DRF.
|
Fix Validation import error in older DRF.
|
Python
|
mit
|
romain-li/django-validator,romain-li/django-validator
|
from rest_framework import status
import rest_framework.exceptions
- class ValidationError(rest_framework.exceptions.ValidationError):
+ class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
+ self.code = code
self.status_code = status_code
- self.code = code
|
Fix Validation import error in older DRF.
|
## Code Before:
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.ValidationError):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.status_code = status_code
self.code = code
## Instruction:
Fix Validation import error in older DRF.
## Code After:
from rest_framework import status
import rest_framework.exceptions
class ValidationError(rest_framework.exceptions.APIException):
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
self.code = code
self.status_code = status_code
|
from rest_framework import status
import rest_framework.exceptions
- class ValidationError(rest_framework.exceptions.ValidationError):
? ^^^^^^ -----
+ class ValidationError(rest_framework.exceptions.APIException):
? ^^^^^^^^
code = ''
def __init__(self, detail, code=None, status_code=status.HTTP_400_BAD_REQUEST):
super(ValidationError, self).__init__(detail)
+ self.code = code
self.status_code = status_code
- self.code = code
|
c4009fdedc1625fe3692c689242d9f32a1c89f97
|
tests/services/conftest.py
|
tests/services/conftest.py
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
requests_mock = RequestsMock()
requests_mock._start()
yield requests_mock
requests_mock._stop()
requests_mock.reset()
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
r = RequestsMock()
with r:
yield r
|
Fix tests to work with responses 0.3.0
|
Fix tests to work with responses 0.3.0
|
Python
|
mit
|
fastmonkeys/netvisor.py
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
- requests_mock = RequestsMock()
+ r = RequestsMock()
+ with r:
+ yield r
- requests_mock._start()
- yield requests_mock
- requests_mock._stop()
- requests_mock.reset()
|
Fix tests to work with responses 0.3.0
|
## Code Before:
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
requests_mock = RequestsMock()
requests_mock._start()
yield requests_mock
requests_mock._stop()
requests_mock.reset()
## Instruction:
Fix tests to work with responses 0.3.0
## Code After:
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
r = RequestsMock()
with r:
yield r
|
import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
- requests_mock = RequestsMock()
? ------------
+ r = RequestsMock()
+ with r:
+ yield r
- requests_mock._start()
- yield requests_mock
- requests_mock._stop()
- requests_mock.reset()
|
9a43f573f2072051c64fc6da432aaad5d31e0023
|
PyMarkdownGen/test/block_code_test.py
|
PyMarkdownGen/test/block_code_test.py
|
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
Add docstrings for tests of block quotes
|
Add docstrings for tests of block quotes
|
Python
|
epl-1.0
|
LukasWoodtli/PyMarkdownGen
|
-
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
-
+ """The test case (fixture) for testing block quotes."""
def test_block_quote(self):
+ """Tests block quotes that contains a '>'
+ on every line.
+
+ """
+
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
+ """Tests block quotes that contain a '>'
+ only on the first line.
+
+ """
+
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
Add docstrings for tests of block quotes
|
## Code Before:
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
def test_block_quote(self):
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
## Instruction:
Add docstrings for tests of block quotes
## Code After:
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
"""The test case (fixture) for testing block quotes."""
def test_block_quote(self):
"""Tests block quotes that contains a '>'
on every line.
"""
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
"""Tests block quotes that contain a '>'
only on the first line.
"""
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
-
import unittest
import PyMarkdownGen.PyMarkdownGen as md
class BlockquoteTests(unittest.TestCase):
-
+ """The test case (fixture) for testing block quotes."""
def test_block_quote(self):
+ """Tests block quotes that contains a '>'
+ on every line.
+
+ """
+
expected = \
"""> this is a
> block quote
> on multiple
> lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a\nblock quote\n"
"on multiple\r\nlines."))
def test_block_quote_simple(self):
+ """Tests block quotes that contain a '>'
+ only on the first line.
+
+ """
+
expected = \
"""> this is a simple
block quote
on multiple
lines.
"""
self.assertEqual(expected,
md.gen_block_quote(
"this is a simple\nblock quote\n"
"on multiple\nlines.", True))
if __name__ == '__main__':
unittest.main()
|
184e726d44d113a46ddb9cf3a5762f453ed7b512
|
myuw/management/commands/clear_expired_sessions.py
|
myuw/management/commands/clear_expired_sessions.py
|
import logging
from datetime import timedelta
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
except Exception as ex:
logger.error(str(ex))
|
import logging
from datetime import timedelta
import time
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
time.sleep(5)
except Exception as ex:
logger.error(str(ex))
|
Add a 5 second pause
|
Add a 5 second pause
|
Python
|
apache-2.0
|
uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw
|
import logging
from datetime import timedelta
+ import time
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
-
+ time.sleep(5)
except Exception as ex:
logger.error(str(ex))
|
Add a 5 second pause
|
## Code Before:
import logging
from datetime import timedelta
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
except Exception as ex:
logger.error(str(ex))
## Instruction:
Add a 5 second pause
## Code After:
import logging
from datetime import timedelta
import time
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
time.sleep(5)
except Exception as ex:
logger.error(str(ex))
|
import logging
from datetime import timedelta
+ import time
from django.core.management.base import BaseCommand, CommandError
from django.contrib.sessions.models import Session
from django.utils import timezone
from myuw.logger.timer import Timer
logger = logging.getLogger(__name__)
begin_delta = 1920
log_format = "Deleted django sessions expired before {}, Time={} seconds"
class Command(BaseCommand):
def handle(self, *args, **options):
now = timezone.now()
for ddelta in range(begin_delta, 0, -1):
timer = Timer()
cut_off_dt = now - timedelta(days=ddelta)
try:
qset = Session.objects.filter(expire_date__lt=cut_off_dt)
if qset.exists():
qset.delete()
logger.info(log_format.format(cut_off_dt.date(),
timer.get_elapsed()))
-
+ time.sleep(5)
except Exception as ex:
logger.error(str(ex))
|
3a2fc6b73aec538802adff2bd95261ffc56ca475
|
rover.py
|
rover.py
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
def set_position(self, x=self.x, y=self.y, direction=self.direction):
self.x = x
self.y = y
self.direction = direction
def move(*args):
for command in args:
if command == 'F':
# Move forward command
pass
else:
pass
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
def set_position(self, x=None, y=None, direction=None):
if x is not None:
self.x = x
if y is not None:
self.y = y
if direction is not None:
self.direction = direction
def move(self, *args):
for command in args:
if command == 'F':
# Move forward command
if self.compass_index < 2:
# Upper right quadrant, increasing x/y
pass
else:
pass
|
Add default arguments to set_position, add self argument to move function
|
Add default arguments to set_position, add self argument to move function
|
Python
|
mit
|
authentik8/rover
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
- def set_position(self, x=self.x, y=self.y, direction=self.direction):
+ def set_position(self, x=None, y=None, direction=None):
+ if x is not None:
- self.x = x
+ self.x = x
- self.y = y
- self.direction = direction
+ if y is not None:
+ self.y = y
+
+ if direction is not None:
+ self.direction = direction
+
- def move(*args):
+ def move(self, *args):
for command in args:
if command == 'F':
# Move forward command
+ if self.compass_index < 2:
+ # Upper right quadrant, increasing x/y
- pass
+ pass
else:
pass
|
Add default arguments to set_position, add self argument to move function
|
## Code Before:
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
def set_position(self, x=self.x, y=self.y, direction=self.direction):
self.x = x
self.y = y
self.direction = direction
def move(*args):
for command in args:
if command == 'F':
# Move forward command
pass
else:
pass
## Instruction:
Add default arguments to set_position, add self argument to move function
## Code After:
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
def set_position(self, x=None, y=None, direction=None):
if x is not None:
self.x = x
if y is not None:
self.y = y
if direction is not None:
self.direction = direction
def move(self, *args):
for command in args:
if command == 'F':
# Move forward command
if self.compass_index < 2:
# Upper right quadrant, increasing x/y
pass
else:
pass
|
class Rover:
compass = ['N', 'E', 'S', 'W']
def __init__(self, x=0, y=0, direction='N'):
self.x = x
self.y = y
self.direction = direction
@property
def position(self):
return self.x, self.y, self.direction
@property
def compass_index(self):
return next(i for i in range(0, len(self.compass)) if self.compass[i] == self.direction)
@property
def axis(self):
# 0 if pointing along x axis
# 1 if pointing along y axis
return (self.compass_index + 1) % 2
- def set_position(self, x=self.x, y=self.y, direction=self.direction):
? ^ ---- ^ ---- ^^^^^^^^^^^^
+ def set_position(self, x=None, y=None, direction=None):
? ^^^ ^^^ ^ +
+ if x is not None:
- self.x = x
+ self.x = x
? ++++++
- self.y = y
- self.direction = direction
+ if y is not None:
+ self.y = y
+
+ if direction is not None:
+ self.direction = direction
+
- def move(*args):
+ def move(self, *args):
? ++++++
for command in args:
if command == 'F':
# Move forward command
+ if self.compass_index < 2:
+ # Upper right quadrant, increasing x/y
- pass
+ pass
? ++++
else:
pass
|
68c768634503d359fac23869e20931f0b39897dc
|
fulfil_client/contrib/mocking.py
|
fulfil_client/contrib/mocking.py
|
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
|
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
Add subdomain and context to mock
|
Add subdomain and context to mock
|
Python
|
isc
|
sharoonthomas/fulfil-python-api,fulfilio/fulfil-python-api
|
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
+ context = {}
+ subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
+ self.context = {}
|
Add subdomain and context to mock
|
## Code Before:
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
## Instruction:
Add subdomain and context to mock
## Code After:
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
context = {}
subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
self.context = {}
|
try:
from unittest import mock
except ImportError:
import mock
class MockFulfil(object):
"""
A Mock object that helps mock away the Fulfil API
for testing.
"""
responses = []
models = {}
+ context = {}
+ subdomain = 'mock-test'
def __init__(self, target, responses=None):
self.target = target
self.reset_mocks()
if responses:
self.responses.extend(responses)
def __enter__(self):
self.start()
return self
def __exit__(self, type, value, traceback):
self.stop()
self.reset_mocks()
return type is None
def model(self, model_name):
return self.models.setdefault(
model_name, mock.MagicMock(name=model_name)
)
def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
)
def stop(self):
"""
End the patch
"""
self._patcher.stop()
def reset_mocks(self):
"""
Reset all the mocks
"""
self.models = {}
+ self.context = {}
|
99a1ce2ecee6dd761113515da5c89b8c7da5537f
|
Python/Algorithm.py
|
Python/Algorithm.py
|
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
pageFrame = [self.pages[x] for x in range(0, self.blocks)] #Create the page frame with elements passed by the user.
self.removeChuncks(self.pages, 0, self.blocks) #Remove part of the list that is on the page frame
return pageFrame #Return the list
|
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
pageFrame = [None] * self.blocks #Create the page frame with elements passed by the user.
iterator = 0
for i in range(0, len(self.pages)):
if self.pages[i] not in pageFrame:
pageFrame[iterator] = self.pages[i]
iterator = iterator + 1
if iterator == self.blocks:
self.removeChuncks(self.pages, 0, i) #Remove part of the list that is on the page frame
break
return pageFrame #Return the list
|
Fix logic error under preparePageFrames
|
Fix logic error under preparePageFrames
|
Python
|
mit
|
caiomcg/OS-PageSubstitution
|
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
- pageFrame = [self.pages[x] for x in range(0, self.blocks)] #Create the page frame with elements passed by the user.
+ pageFrame = [None] * self.blocks #Create the page frame with elements passed by the user.
+ iterator = 0
+ for i in range(0, len(self.pages)):
+ if self.pages[i] not in pageFrame:
+ pageFrame[iterator] = self.pages[i]
+ iterator = iterator + 1
+ if iterator == self.blocks:
- self.removeChuncks(self.pages, 0, self.blocks) #Remove part of the list that is on the page frame
+ self.removeChuncks(self.pages, 0, i) #Remove part of the list that is on the page frame
+ break
+
return pageFrame #Return the list
|
Fix logic error under preparePageFrames
|
## Code Before:
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
pageFrame = [self.pages[x] for x in range(0, self.blocks)] #Create the page frame with elements passed by the user.
self.removeChuncks(self.pages, 0, self.blocks) #Remove part of the list that is on the page frame
return pageFrame #Return the list
## Instruction:
Fix logic error under preparePageFrames
## Code After:
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
pageFrame = [None] * self.blocks #Create the page frame with elements passed by the user.
iterator = 0
for i in range(0, len(self.pages)):
if self.pages[i] not in pageFrame:
pageFrame[iterator] = self.pages[i]
iterator = iterator + 1
if iterator == self.blocks:
self.removeChuncks(self.pages, 0, i) #Remove part of the list that is on the page frame
break
return pageFrame #Return the list
|
class Algorithm:
"""
Algorithm Class
Base class for the page substitution algorithms.
"""
def __init__(self, input = []):
"""
Algorithm Constructor.
Parameters
----------
input : list
A list containing the the input page swap.
"""
if not input: #If the list is empty throw an exception.
raise ValueError("The list must not be empty") #throws the exception.
self.blocks = input[0] #Store the page frames size.
self.pages = input[1:] #Store the pages to swap.
self.missingPages = self.blocks #Count the lack of pages.
def removeChuncks(self, list, start, stop):
"""
Remove a piece of a list.
Parameters
----------
list : list
The list to delete the elements
start : int
start point.
stop : int
stop point.
"""
del list[start:stop] #Delete range
def preparePageFrame(self):
"""
Prepare the page frames.
Returns
-------
list
The list with initialized Page frames
"""
- pageFrame = [self.pages[x] for x in range(0, self.blocks)] #Create the page frame with elements passed by the user.
? ^ ---------- ^^^^^^^^^^^^^^^^^ --
+ pageFrame = [None] * self.blocks #Create the page frame with elements passed by the user.
? ^^^ ^
+ iterator = 0
+ for i in range(0, len(self.pages)):
+ if self.pages[i] not in pageFrame:
+ pageFrame[iterator] = self.pages[i]
+ iterator = iterator + 1
+ if iterator == self.blocks:
- self.removeChuncks(self.pages, 0, self.blocks) #Remove part of the list that is on the page frame
? ^^^^^^^^^^^
+ self.removeChuncks(self.pages, 0, i) #Remove part of the list that is on the page frame
? +++ ^
+ break
+
return pageFrame #Return the list
|
30c21806dcc347326d6ac51be2adac9ff637f241
|
day20/part1.py
|
day20/part1.py
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l <= lowest <= r:
lowest = r + 1
print(lowest)
input()
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l > lowest:
break
if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
Break the loop at the first gap
|
Break the loop at the first gap
|
Python
|
unlicense
|
ultramega/adventofcode2016
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
+ if l > lowest:
+ break
- if l <= lowest <= r:
+ if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
Break the loop at the first gap
|
## Code Before:
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l <= lowest <= r:
lowest = r + 1
print(lowest)
input()
## Instruction:
Break the loop at the first gap
## Code After:
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
if l > lowest:
break
if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
ranges = []
for line in open('input.txt', 'r'):
ranges.append(tuple(map(int, line.split('-'))))
ranges.sort()
lowest = 0
for l, r in ranges:
+ if l > lowest:
+ break
- if l <= lowest <= r:
? -----
+ if lowest <= r:
lowest = r + 1
print(lowest)
input()
|
94861438189537b88deaf8d04cc9942192038d8c
|
user_messages/views.py
|
user_messages/views.py
|
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
|
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
if request.user == thread.to_user:
thread.to_user_unread = False
else:
thread.from_user_unread = False
thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
|
Update the read status of a thread when it's viewed
|
Update the read status of a thread when it's viewed
|
Python
|
mit
|
eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,arthur-wsw/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages
|
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
+ if request.user == thread.to_user:
+ thread.to_user_unread = False
+ else:
+ thread.from_user_unread = False
+ thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
-
-
|
Update the read status of a thread when it's viewed
|
## Code Before:
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
## Instruction:
Update the read status of a thread when it's viewed
## Code After:
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
if request.user == thread.to_user:
thread.to_user_unread = False
else:
thread.from_user_unread = False
thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
|
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
+ if request.user == thread.to_user:
+ thread.to_user_unread = False
+ else:
+ thread.from_user_unread = False
+ thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
-
-
|
52b022869b7092fc519accc2132c3f842502aeae
|
create_input_files.py
|
create_input_files.py
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
|
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
Clean up input tables class and instance declarations
|
Clean up input tables class and instance declarations
|
Python
|
mit
|
ndebuhr/openfea,ndebuhr/openfea
|
from utils import write_csv_rows, read_csv_rows
class input_table:
- def __init__(self, filename, content):
+ def __init__(self, filename, name, headers, content=[]):
self.filename = filename
+ self.name = name
+ self.headers = headers
self.content = content
+ connect_filename = 'connectivity.csv'
+ connect_name = ['Connectivity Table']
+ connect_headers = ['x1','y1','x2','y2','E','A']
- connect_tbl=input_table('connectivity.csv',
+ connect_tbl = input_table(connect_filename,
- [['Connectivity Table'],
+ connect_name,
+ connect_headers)
- ['x1','y1','x2','y2','E','A']])
- force_tbl=input_table('forces.csv',
- [['Force Table'],
- ['x','y','Fx','Fy']])
- bc_tbl=input_table('boundary_conditions.csv',
- [['Boundary Conditions'],
- ['x','y','Constrained Dimension','Displacement']])
- sim_tbl=input_table('simulation_parameters.csv',
- [['Simulation Parameters'],
- ['Numerical Soln Multiplier','Degrees of Freedom'],
- ['1e9']])
+ force_filename = 'forces.csv'
+ force_name = ['Force Table']
+ force_headers = ['x','y','Fx','Fy']
+ force_tbl = input_table(force_filename,
+ force_name,
+ force_headers)
+
+ bc_filename = 'boundary_conditions.csv'
+ bc_name = ['Boundary Conditions']
+ bc_headers = ['x','y','Constrained Dimension','Displacement']
+ bc_tbl = input_table(bc_filename,
+ bc_name,
+ bc_headers)
+
+ sim_filename = 'simulation_parameters.csv'
+ sim_name = ['Simulation Parameters']
+ sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
+ sim_content = ['1e9']
+ sim_tbl = input_table(sim_filename,
+ sim_name,
+ sim_headers,
+ sim_content)
+
- input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
+ input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
+ tbl_list = [input_files[i].name,
+ input_files[i].headers,
+ input_files[i].content]
- write_csv_rows(input_files[i].filename,input_files[i].content)
+ write_csv_rows(input_files[i].filename,tbl_list)
- print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
+ print(input_files[i].name[0] + ' written to ' +\
+ input_files[i].filename)
|
Clean up input tables class and instance declarations
|
## Code Before:
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, content):
self.filename = filename
self.content = content
connect_tbl=input_table('connectivity.csv',
[['Connectivity Table'],
['x1','y1','x2','y2','E','A']])
force_tbl=input_table('forces.csv',
[['Force Table'],
['x','y','Fx','Fy']])
bc_tbl=input_table('boundary_conditions.csv',
[['Boundary Conditions'],
['x','y','Constrained Dimension','Displacement']])
sim_tbl=input_table('simulation_parameters.csv',
[['Simulation Parameters'],
['Numerical Soln Multiplier','Degrees of Freedom'],
['1e9']])
input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
write_csv_rows(input_files[i].filename,input_files[i].content)
print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
## Instruction:
Clean up input tables class and instance declarations
## Code After:
from utils import write_csv_rows, read_csv_rows
class input_table:
def __init__(self, filename, name, headers, content=[]):
self.filename = filename
self.name = name
self.headers = headers
self.content = content
connect_filename = 'connectivity.csv'
connect_name = ['Connectivity Table']
connect_headers = ['x1','y1','x2','y2','E','A']
connect_tbl = input_table(connect_filename,
connect_name,
connect_headers)
force_filename = 'forces.csv'
force_name = ['Force Table']
force_headers = ['x','y','Fx','Fy']
force_tbl = input_table(force_filename,
force_name,
force_headers)
bc_filename = 'boundary_conditions.csv'
bc_name = ['Boundary Conditions']
bc_headers = ['x','y','Constrained Dimension','Displacement']
bc_tbl = input_table(bc_filename,
bc_name,
bc_headers)
sim_filename = 'simulation_parameters.csv'
sim_name = ['Simulation Parameters']
sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
sim_content = ['1e9']
sim_tbl = input_table(sim_filename,
sim_name,
sim_headers,
sim_content)
input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
for i in range(0,len(input_files)):
tbl_list = [input_files[i].name,
input_files[i].headers,
input_files[i].content]
write_csv_rows(input_files[i].filename,tbl_list)
print(input_files[i].name[0] + ' written to ' +\
input_files[i].filename)
|
from utils import write_csv_rows, read_csv_rows
class input_table:
- def __init__(self, filename, content):
+ def __init__(self, filename, name, headers, content=[]):
? +++++++++++++++ +++
self.filename = filename
+ self.name = name
+ self.headers = headers
self.content = content
+ connect_filename = 'connectivity.csv'
+ connect_name = ['Connectivity Table']
+ connect_headers = ['x1','y1','x2','y2','E','A']
- connect_tbl=input_table('connectivity.csv',
? - ^^^^^^^^^
+ connect_tbl = input_table(connect_filename,
? + + ++ ^^^^^^
- [['Connectivity Table'],
? ^^^^ ^^^^^^^ ^^ --
+ connect_name,
? ^^^ ^^ ^
+ connect_headers)
- ['x1','y1','x2','y2','E','A']])
- force_tbl=input_table('forces.csv',
- [['Force Table'],
- ['x','y','Fx','Fy']])
- bc_tbl=input_table('boundary_conditions.csv',
- [['Boundary Conditions'],
- ['x','y','Constrained Dimension','Displacement']])
- sim_tbl=input_table('simulation_parameters.csv',
- [['Simulation Parameters'],
- ['Numerical Soln Multiplier','Degrees of Freedom'],
- ['1e9']])
+ force_filename = 'forces.csv'
+ force_name = ['Force Table']
+ force_headers = ['x','y','Fx','Fy']
+ force_tbl = input_table(force_filename,
+ force_name,
+ force_headers)
+
+ bc_filename = 'boundary_conditions.csv'
+ bc_name = ['Boundary Conditions']
+ bc_headers = ['x','y','Constrained Dimension','Displacement']
+ bc_tbl = input_table(bc_filename,
+ bc_name,
+ bc_headers)
+
+ sim_filename = 'simulation_parameters.csv'
+ sim_name = ['Simulation Parameters']
+ sim_headers = ['Numerical Soln Multiplier','Degrees of Freedom']
+ sim_content = ['1e9']
+ sim_tbl = input_table(sim_filename,
+ sim_name,
+ sim_headers,
+ sim_content)
+
- input_files=[connect_tbl,force_tbl,bc_tbl,sim_tbl]
+ input_files = [connect_tbl,force_tbl,bc_tbl,sim_tbl]
? + +
for i in range(0,len(input_files)):
+ tbl_list = [input_files[i].name,
+ input_files[i].headers,
+ input_files[i].content]
- write_csv_rows(input_files[i].filename,input_files[i].content)
? --------- ----------
+ write_csv_rows(input_files[i].filename,tbl_list)
? +++++
- print(input_files[i].content[0][0] + ' written to ' + input_files[i].filename)
+ print(input_files[i].name[0] + ' written to ' +\
+ input_files[i].filename)
|
6381204585c64ed70bf23237731bdb92db445c05
|
cycy/parser/ast.py
|
cycy/parser/ast.py
|
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
|
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
class BinaryOperation(Node):
def __init__(self, operand, left, right):
assert operand in ("+", "-") # for now
self.operand = operand
self.left = left
self.right = right
class Int32(Node):
def __init__(self, value):
assert isinstance(value, int)
assert -2**32 < value <= 2**32-1
self.value = value
|
Add the basic AST nodes.
|
Add the basic AST nodes.
|
Python
|
mit
|
Magnetic/cycy,Magnetic/cycy,Magnetic/cycy
|
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
+ class BinaryOperation(Node):
+ def __init__(self, operand, left, right):
+ assert operand in ("+", "-") # for now
+ self.operand = operand
+ self.left = left
+ self.right = right
+
+ class Int32(Node):
+ def __init__(self, value):
+ assert isinstance(value, int)
+ assert -2**32 < value <= 2**32-1
+ self.value = value
+
|
Add the basic AST nodes.
|
## Code Before:
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
## Instruction:
Add the basic AST nodes.
## Code After:
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
class BinaryOperation(Node):
def __init__(self, operand, left, right):
assert operand in ("+", "-") # for now
self.operand = operand
self.left = left
self.right = right
class Int32(Node):
def __init__(self, value):
assert isinstance(value, int)
assert -2**32 < value <= 2**32-1
self.value = value
|
class Node(object):
def __eq__(self, other):
return (
self.__class__ == other.__class__ and
self.__dict__ == other.__dict__
)
def __ne__(self, other):
return not self == other
+
+ class BinaryOperation(Node):
+ def __init__(self, operand, left, right):
+ assert operand in ("+", "-") # for now
+ self.operand = operand
+ self.left = left
+ self.right = right
+
+ class Int32(Node):
+ def __init__(self, value):
+ assert isinstance(value, int)
+ assert -2**32 < value <= 2**32-1
+ self.value = value
|
b97842ecf1c8fa22b599353c1c7fe75fcf482702
|
tests/test_utils.py
|
tests/test_utils.py
|
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import (split_translated_fieldname,
transform_translatable_fields)
from modeltrans.utils import build_localized_fieldname
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
|
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import transform_translatable_fields
from modeltrans.utils import (build_localized_fieldname,
split_translated_fieldname)
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
|
Use proper import from utils
|
Use proper import from utils
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
from __future__ import unicode_literals
from django.test import TestCase
- from modeltrans.manager import (split_translated_fieldname,
+ from modeltrans.manager import transform_translatable_fields
- transform_translatable_fields)
- from modeltrans.utils import build_localized_fieldname
+ from modeltrans.utils import (build_localized_fieldname,
+ split_translated_fieldname)
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
|
Use proper import from utils
|
## Code Before:
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import (split_translated_fieldname,
transform_translatable_fields)
from modeltrans.utils import build_localized_fieldname
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
## Instruction:
Use proper import from utils
## Code After:
from __future__ import unicode_literals
from django.test import TestCase
from modeltrans.manager import transform_translatable_fields
from modeltrans.utils import (build_localized_fieldname,
split_translated_fieldname)
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
|
from __future__ import unicode_literals
from django.test import TestCase
- from modeltrans.manager import (split_translated_fieldname,
? ^ ^^^^ - ^^^^^
+ from modeltrans.manager import transform_translatable_fields
? ^^^^ ^^^^ +++ ^
- transform_translatable_fields)
- from modeltrans.utils import build_localized_fieldname
+ from modeltrans.utils import (build_localized_fieldname,
? + +
+ split_translated_fieldname)
from tests.app.models import Blog
class UtilsTest(TestCase):
def test_split_translated_fieldname(self):
self.assertEquals(
split_translated_fieldname('title_nl'),
('title', 'nl')
)
self.assertEquals(
split_translated_fieldname('full_name_nl'),
('full_name', 'nl')
)
def test_transform_translatable_fields(self):
self.assertEquals(
transform_translatable_fields(Blog, {'title': 'bar', 'title_nl': 'foo'}),
{
'i18n': {
'title_nl': 'foo'
},
'title': 'bar'
}
)
def test_build_localized_fieldname(self):
self.assertEquals(
build_localized_fieldname('title', 'nl'),
'title_nl'
)
self.assertEquals(
build_localized_fieldname('category__name', 'nl'),
'category__name_nl'
)
|
2607d142a32ad31fd4c432c0830c3173daee79fb
|
src/util/results.py
|
src/util/results.py
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
Create inner function and rename variables
|
refactor: Create inner function and rename variables
|
Python
|
mit
|
bit0001/trajectory_tracking,bit0001/trajectory_tracking
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
+ def get_table_name(controller, trajectory, date_time):
+ return '_'.join([controller,
+ trajectory,
+ date_time.strftime('%Y_%m_%d_%H_%M_%S')])
+
- creation_datetime = datetime.datetime.now()
+ date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
- table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
+ table_name = get_table_name(
+ controller_name,
+ trajectory_name,
+ date_time
+ )
+
cursor.execute(QUERIES['create_sims'])
-
- cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
+ cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
-
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
Create inner function and rename variables
|
## Code Before:
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
creation_datetime = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
## Instruction:
Create inner function and rename variables
## Code After:
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
def get_table_name(controller, trajectory, date_time):
return '_'.join([controller,
trajectory,
date_time.strftime('%Y_%m_%d_%H_%M_%S')])
date_time = datetime.datetime.now()
with DBContextManager(database_path) as cursor:
table_name = get_table_name(
controller_name,
trajectory_name,
date_time
)
cursor.execute(QUERIES['create_sims'])
cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
import datetime
from context_manager.db_context_manager import DBContextManager
from util.constants import QUERIES
def export_results(data, controller_name, trajectory_name, database_path):
+ def get_table_name(controller, trajectory, date_time):
+ return '_'.join([controller,
+ trajectory,
+ date_time.strftime('%Y_%m_%d_%H_%M_%S')])
+
- creation_datetime = datetime.datetime.now()
? ---------
+ date_time = datetime.datetime.now()
? +
with DBContextManager(database_path) as cursor:
- table_name = ('_'.join([controller_name, trajectory_name, creation_datetime.strftime('%Y_%m_%d_%H_%M_%S')]))
+ table_name = get_table_name(
+ controller_name,
+ trajectory_name,
+ date_time
+ )
+
cursor.execute(QUERIES['create_sims'])
-
- cursor.execute(QUERIES['insert_sim'], (table_name, creation_datetime))
? ---------
+ cursor.execute(QUERIES['insert_sim'], (table_name, date_time))
? +
-
cursor.execute(QUERIES['create_sim'].format(table_name))
for i in range(len(data['t'])):
cursor.execute(
QUERIES['insert_data'].format(table_name),
(data['t'][i], data['x'][i], data['x_ref'][i],
data['y'][i], data['y_ref'][i], data['theta'][i],
data['theta_ref'][i], data['v_c'][i], data['w_c'][i])
)
|
174d9862242cecdf89c3fd398b93e805e49dea44
|
tinned_django/manage.py
|
tinned_django/manage.py
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Set up test environment when launching tests.
|
Set up test environment when launching tests.
|
Python
|
mit
|
futurecolors/tinned-django,futurecolors/tinned-django
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
+ if len(sys.argv) > 1 and sys.argv[1] == 'test':
+ os.environ['DJANGO_CONFIGURATION'] = 'Testing'
+
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Set up test environment when launching tests.
|
## Code Before:
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
## Instruction:
Set up test environment when launching tests.
## Code After:
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
if len(sys.argv) > 1 and sys.argv[1] == 'test':
os.environ['DJANGO_CONFIGURATION'] = 'Testing'
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
os.environ.setdefault("DJANGO_CONFIGURATION", "Development")
+ if len(sys.argv) > 1 and sys.argv[1] == 'test':
+ os.environ['DJANGO_CONFIGURATION'] = 'Testing'
+
from configurations.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
3e4707a3f25f3a2f84f811394d738cebc1ca9f19
|
mygpo/search/models.py
|
mygpo/search/models.py
|
""" Wrappers for the results of a search """
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = doc['_id']
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
""" Wrappers for the results of a search """
import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
Fix parsing UUID in search results
|
Fix parsing UUID in search results
|
Python
|
agpl-3.0
|
gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo
|
""" Wrappers for the results of a search """
-
+ import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
- obj.id = doc['_id']
+ obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
Fix parsing UUID in search results
|
## Code Before:
""" Wrappers for the results of a search """
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = doc['_id']
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
## Instruction:
Fix parsing UUID in search results
## Code After:
""" Wrappers for the results of a search """
import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
obj.id = uuid.UUID(doc['_id']).hex
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
""" Wrappers for the results of a search """
-
+ import uuid
class PodcastResult(object):
""" Wrapper for a Podcast search result """
@classmethod
def from_doc(cls, doc):
""" Construct a PodcastResult from a search result """
obj = cls()
for key, val in doc['_source'].items():
setattr(obj, key, val)
- obj.id = doc['_id']
+ obj.id = uuid.UUID(doc['_id']).hex
? ++++++++++ +++++
return obj
@property
def slug(self):
return next(iter(self.slugs), None)
@property
def url(self):
return next(iter(self.urls), None)
def get_id(self):
return self.id
@property
def display_title(self):
return self.title
|
fd1590ad0ceab26e281c58aefeac1365a3f332d5
|
tests/test_lib_tokens_webauthn.py
|
tests/test_lib_tokens_webauthn.py
|
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
RP_ID = 'example.com'
RP_NAME = 'ACME'
class WebAuthnTokenTestCase(MyTestCase):
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+","
+WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID)
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
|
import unittest
from copy import copy
from privacyidea.lib.tokens import webauthn
from privacyidea.lib.tokens.webauthn import COSEALGORITHM
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
class WebAuthnTokenTestCase(MyTestCase):
RP_ID = 'example.com'
RP_NAME = 'ACME'
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+","
+WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID)
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
|
Add testing for the WebAuthn implementation
|
Add testing for the WebAuthn implementation
|
Python
|
agpl-3.0
|
privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea
|
+ import unittest
+ from copy import copy
+
+ from privacyidea.lib.tokens import webauthn
+ from privacyidea.lib.tokens.webauthn import COSEALGORITHM
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
- RP_ID = 'example.com'
- RP_NAME = 'ACME'
-
class WebAuthnTokenTestCase(MyTestCase):
+ RP_ID = 'example.com'
+ RP_NAME = 'ACME'
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
- action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+","
+ action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+","
- +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID)
+ +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID)
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
|
Add testing for the WebAuthn implementation
|
## Code Before:
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
RP_ID = 'example.com'
RP_NAME = 'ACME'
class WebAuthnTokenTestCase(MyTestCase):
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+","
+WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID)
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
## Instruction:
Add testing for the WebAuthn implementation
## Code After:
import unittest
from copy import copy
from privacyidea.lib.tokens import webauthn
from privacyidea.lib.tokens.webauthn import COSEALGORITHM
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
class WebAuthnTokenTestCase(MyTestCase):
RP_ID = 'example.com'
RP_NAME = 'ACME'
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+","
+WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID)
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
|
+ import unittest
+ from copy import copy
+
+ from privacyidea.lib.tokens import webauthn
+ from privacyidea.lib.tokens.webauthn import COSEALGORITHM
from .base import MyTestCase
from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION
from privacyidea.lib.token import init_token
from privacyidea.lib.policy import set_policy, SCOPE
- RP_ID = 'example.com'
- RP_NAME = 'ACME'
-
class WebAuthnTokenTestCase(MyTestCase):
+ RP_ID = 'example.com'
+ RP_NAME = 'ACME'
def test_00_users(self):
self.setUp_user_realms()
set_policy(name="WebAuthn",
scope=SCOPE.ENROLL,
- action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+","
+ action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+","
? +++++
- +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID)
+ +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID)
? + +++++
def test_01_create_token(self):
pin = "1234"
#
# Init step 1
#
token = init_token({'type': 'webauthn',
'pin': pin})
serial = token.token.serial
self.assertEqual(token.type, "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN")
self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn")
self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
|
78b82b0c5e074c279288b9d53fe9cb5cfe1381ae
|
fabfile.py
|
fabfile.py
|
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
Kill __pycache__ directories in tests
|
Kill __pycache__ directories in tests
|
Python
|
mit
|
smarter-travel-media/stac
|
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
+ local('rm -rf test/unit/__pycache__')
+ local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
Kill __pycache__ directories in tests
|
## Code Before:
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
## Instruction:
Kill __pycache__ directories in tests
## Code After:
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
+ local('rm -rf test/unit/__pycache__')
+ local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
7f8e5913f493582608712244cbfff0bf8d658c51
|
chainerrl/misc/batch_states.py
|
chainerrl/misc/batch_states.py
|
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
if xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
|
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
if chainer.cuda.available and xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
|
Fix error of chainer v3 on chainer.cuda.cupy
|
Fix error of chainer v3 on chainer.cuda.cupy
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
- if xp is chainer.cuda.cupy:
+ if chainer.cuda.available and xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
|
Fix error of chainer v3 on chainer.cuda.cupy
|
## Code Before:
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
if xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
## Instruction:
Fix error of chainer v3 on chainer.cuda.cupy
## Code After:
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
if chainer.cuda.available and xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
|
import chainer
def batch_states(states, xp, phi):
"""The default method for making batch of observations.
Args:
states (list): list of observations from an environment.
xp (module): numpy or cupy
phi (callable): Feature extractor applied to observations
Return:
the object which will be given as input to the model.
"""
- if xp is chainer.cuda.cupy:
+ if chainer.cuda.available and xp is chainer.cuda.cupy:
# GPU
device = chainer.cuda.Device().id
else:
# CPU
device = -1
features = [phi(s) for s in states]
return chainer.dataset.concat_examples(features, device=device)
|
9568efceab48f87ed8302ec4f9bad4b15aac4c5a
|
tests/test_action.py
|
tests/test_action.py
|
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
|
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
def test_connection_closed_after_sending_mail(self, mock_smtp_class):
mock_smtp = mock_smtp_class.return_value
self.action.execute("MSFT has crossed $10 price level")
mock_smtp.send_message.assert_called_with(mock.ANY)
self.assertTrue(mock_smtp.quit.called)
mock_smtp.assert_has_calls([
mock.call.send_message(mock.ANY),
mock.call.quit()
])
|
Add test to check if connection is closed after email is sent.
|
Add test to check if connection is closed after email is sent.
|
Python
|
mit
|
bsmukasa/stock_alerter
|
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
+ def test_connection_closed_after_sending_mail(self, mock_smtp_class):
+ mock_smtp = mock_smtp_class.return_value
+ self.action.execute("MSFT has crossed $10 price level")
+ mock_smtp.send_message.assert_called_with(mock.ANY)
+ self.assertTrue(mock_smtp.quit.called)
+ mock_smtp.assert_has_calls([
+ mock.call.send_message(mock.ANY),
+ mock.call.quit()
+ ])
+
|
Add test to check if connection is closed after email is sent.
|
## Code Before:
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
## Instruction:
Add test to check if connection is closed after email is sent.
## Code After:
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
def test_connection_closed_after_sending_mail(self, mock_smtp_class):
mock_smtp = mock_smtp_class.return_value
self.action.execute("MSFT has crossed $10 price level")
mock_smtp.send_message.assert_called_with(mock.ANY)
self.assertTrue(mock_smtp.quit.called)
mock_smtp.assert_has_calls([
mock.call.send_message(mock.ANY),
mock.call.quit()
])
|
import smtplib
import unittest
from unittest import mock
from action import PrintAction, EmailAction
@mock.patch("builtins.print")
class PrintActionTest(unittest.TestCase):
def test_executing_action_prints_message(self, mock_print):
action = PrintAction()
action.execute("GOOG > $10")
mock_print.assert_called_with("GOOG > $10")
@mock.patch("smtplib.SMTP")
class EmailActionTest(unittest.TestCase):
def setUp(self):
self.action = EmailAction(to="[email protected]")
def test_email_is_sent_to_the_right_server(self, mock_smtp_class):
self.action.execute("MSFT has crossed $10 price level")
mock_smtp_class.assert_called_with("email.stocks.com")
+
+ def test_connection_closed_after_sending_mail(self, mock_smtp_class):
+ mock_smtp = mock_smtp_class.return_value
+ self.action.execute("MSFT has crossed $10 price level")
+ mock_smtp.send_message.assert_called_with(mock.ANY)
+ self.assertTrue(mock_smtp.quit.called)
+ mock_smtp.assert_has_calls([
+ mock.call.send_message(mock.ANY),
+ mock.call.quit()
+ ])
|
f4851040b74a0c88980a1e82a8b518bd6147f508
|
FF4P/Abilities.py
|
FF4P/Abilities.py
|
import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
Fix Filename Errors Module folder had changed at some point in the past, fixed the file path so it could find the CSV
|
Fix Filename Errors
Module folder had changed at some point in the past, fixed the file
path so it could find the CSV
|
Python
|
mit
|
einSynd/PyIRC
|
+ import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
+ fileName = "FF4P/FF4P_Abil.csv"
+ if not os.path.exists(fileName):
+ fileName = "FF4P_Abil.csv"
+
- with open('FF4/FF4Abil.csv', 'r') as csvFile:
+ with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
Fix Filename Errors Module folder had changed at some point in the past, fixed the file path so it could find the CSV
|
## Code Before:
import csv
abilityList = {}
def loadAbilities():
global abilityList
with open('FF4/FF4Abil.csv', 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
## Instruction:
Fix Filename Errors Module folder had changed at some point in the past, fixed the file path so it could find the CSV
## Code After:
import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
fileName = "FF4P/FF4P_Abil.csv"
if not os.path.exists(fileName):
fileName = "FF4P_Abil.csv"
with open(fileName, 'r') as csvFile:
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
+ import os
import csv
abilityList = {}
def loadAbilities():
global abilityList
+ fileName = "FF4P/FF4P_Abil.csv"
+ if not os.path.exists(fileName):
+ fileName = "FF4P_Abil.csv"
+
- with open('FF4/FF4Abil.csv', 'r') as csvFile:
? ^^^^^^^^^^ ^^^^^
+ with open(fileName, 'r') as csvFile:
? ^ ^^^^^
abilityReader = csv.reader(csvFile, delimiter=',', quotechar='|')
i = 0
for row in abilityReader:
abilityList[i] = row
i += 1
def reloadAbilities():
loadAbilities()
print("Abilities reloaded.")
def getAbility(name):
if abilityList == {}:
loadAbilities()
none = ["none"]
for _,ability in abilityList.items():
if ability[0].lower() == name.lower():
return ability
return none
|
6ce6f22837b9e6a1dc8423038b6e2eb3d0a8de89
|
rxet/helper.py
|
rxet/helper.py
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
|
from struct import unpack
def read_uint32(fileobj):
    """Read 4 bytes from *fileobj* and return them as a native-endian
    unsigned 32-bit integer."""
    raw = fileobj.read(4)
    (value,) = unpack("I", raw)
    return value
def read_uint32_BE(fileobj):
    """Read 4 bytes from *fileobj* and return them as a big-endian
    unsigned 32-bit integer."""
    raw = fileobj.read(4)
    (value,) = unpack(">I", raw)
    return value
|
Add big endian integer reading
|
Add big endian integer reading
|
Python
|
mit
|
RenolY2/battalion-tools
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
+
+ # read int as a big endian number
+ def read_uint32_BE(fileobj):
+ return unpack(">I", fileobj.read(4))[0]
|
Add big endian integer reading
|
## Code Before:
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
## Instruction:
Add big endian integer reading
## Code After:
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
+
+ # read int as a big endian number
+ def read_uint32_BE(fileobj):
+ return unpack(">I", fileobj.read(4))[0]
|
29dbdd805eb401da5a46ff26d759f249650bedeb
|
src/enru.py
|
src/enru.py
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
import urllib
from bs4 import BeautifulSoup
class Enru:
    """Look up a word by fetching and parsing a dictionary page.

    URL construction and content extraction are both delegated to the
    injected *parser* object.
    """

    def __init__(self, parser):
        self.parser = parser

    def run(self, word, show_examples):
        """Fetch, parse and return the dictionary content for *word*."""
        return self.parse(self.fetch(self.get_url(word)), show_examples)

    def fetch(self, url):
        """Open *url* and return the raw response object."""
        return urllib.urlopen(url)

    def parse(self, markup, show_examples):
        """Build a soup from *markup* and let the parser extract content."""
        return self.parser.get_content(BeautifulSoup(markup, "lxml"), show_examples)

    def get_url(self, word):
        """Delegate lookup-URL construction to the parser."""
        return self.parser.get_url(word)
|
Remove unneeded TODO Click takes care of arguments actually
|
Remove unneeded TODO
Click takes care of arguments actually
|
Python
|
mit
|
everyonesdesign/enru,everyonesdesign/enru-python,everyonesdesign/enru-python,everyonesdesign/enru
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
- # TODO: throw error if there's no word
-
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
Remove unneeded TODO Click takes care of arguments actually
|
## Code Before:
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
# TODO: throw error if there's no word
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
## Instruction:
Remove unneeded TODO Click takes care of arguments actually
## Code After:
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
import urllib
from bs4 import BeautifulSoup
class Enru:
def __init__(self, parser):
self.parser = parser
def run(self, word, show_examples):
- # TODO: throw error if there's no word
-
url = self.get_url(word)
markup = self.fetch(url)
content = self.parse(markup, show_examples)
return content
def fetch(self, url):
return urllib.urlopen(url)
def parse(self, markup, show_examples):
soup = BeautifulSoup(markup, "lxml")
return self.parser.get_content(soup, show_examples)
def get_url(self, word):
return self.parser.get_url(word)
|
4c60e42af4b37c260e2a9f00eb82dbd44ee53799
|
__init__.py
|
__init__.py
|
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector', 'GameLoop', 'World']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
|
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector',
'GameLoop', 'World', 'Image', 'Entity']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
|
Put Image and Entity into __all__
|
Put Image and Entity into __all__
|
Python
|
lgpl-2.1
|
momikey/pyrge
|
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
- 'Game', 'Constants', 'Point', 'Vector', 'GameLoop', 'World']
+ 'Game', 'Constants', 'Point', 'Vector',
+ 'GameLoop', 'World', 'Image', 'Entity']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
|
Put Image and Entity into __all__
|
## Code Before:
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector', 'GameLoop', 'World']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
## Instruction:
Put Image and Entity into __all__
## Code After:
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
'Game', 'Constants', 'Point', 'Vector',
'GameLoop', 'World', 'Image', 'Entity']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
|
__all__ = ['effects',
'emitter',
'entity',
'gameloop',
'mixin',
'music',
'point',
'quadtree',
'sound',
'spritesheet',
'text',
'tiledimage',
'tilemap',
'tween',
'tweenfunc',
'util',
'world',
- 'Game', 'Constants', 'Point', 'Vector', 'GameLoop', 'World']
? ---------------------
+ 'Game', 'Constants', 'Point', 'Vector',
+ 'GameLoop', 'World', 'Image', 'Entity']
# convenience imports
import entity, gameloop, util, world, mixin, music, point, sound, text, \
tiledimage, tilemap, tween, tweenfunc, emitter, effects
from gameloop import Game, GameLoop
from world import World
from point import Point, Vector
from entity import Image, Entity
Constants = Game.Constants
"""A number of useful constants, such as keycodes, event types, and display flags."""
|
4b7b2727a35cfcb0117b0ba4571da9a0ea81824a
|
greenmine/base/routers.py
|
greenmine/base/routers.py
|
from rest_framework import routers
# Special router for actions.
actions_router = routers.Route(url=r'^{prefix}/{methodname}{trailing_slash}$',
mapping={'{httpmethod}': '{methodname}'},
name='{basename}-{methodnamehyphen}',
initkwargs={})
class DefaultRouter(routers.DefaultRouter):
routes = [
routers.DefaultRouter.routes[0],
actions_router,
routers.DefaultRouter.routes[2],
routers.DefaultRouter.routes[1]
]
__all__ = ["DefaultRouter"]
|
from rest_framework import routers
class DefaultRouter(routers.DefaultRouter):
    # Plain subclass of DRF's DefaultRouter; kept as a project-level alias
    # so local routing customisations can be added later without changing
    # every importer.
    pass
__all__ = ["DefaultRouter"]
|
Remove old reimplementation of routes.
|
Remove old reimplementation of routes.
|
Python
|
agpl-3.0
|
joshisa/taiga-back,joshisa/taiga-back,gam-phon/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,crr0004/taiga-back,EvgeneOskin/taiga-back,dayatz/taiga-back,Rademade/taiga-back,joshisa/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,forging2012/taiga-back,coopsource/taiga-back,dycodedev/taiga-back,frt-arch/taiga-back,astagi/taiga-back,Rademade/taiga-back,astronaut1712/taiga-back,astagi/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,seanchen/taiga-back,dayatz/taiga-back,taigaio/taiga-back,Tigerwhit4/taiga-back,gauravjns/taiga-back,jeffdwyatt/taiga-back,EvgeneOskin/taiga-back,joshisa/taiga-back,forging2012/taiga-back,Zaneh-/bearded-tribble-back,obimod/taiga-back,forging2012/taiga-back,Tigerwhit4/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,obimod/taiga-back,astronaut1712/taiga-back,rajiteh/taiga-back,gauravjns/taiga-back,gam-phon/taiga-back,coopsource/taiga-back,WALR/taiga-back,Zaneh-/bearded-tribble-back,Zaneh-/bearded-tribble-back,jeffdwyatt/taiga-back,jeffdwyatt/taiga-back,xdevelsistemas/taiga-back-community,gauravjns/taiga-back,astagi/taiga-back,CoolCloud/taiga-back,obimod/taiga-back,bdang2012/taiga-back-casting,CoolCloud/taiga-back,seanchen/taiga-back,Rademade/taiga-back,Tigerwhit4/taiga-back,coopsource/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,CMLL/taiga-back,xdevelsistemas/taiga-back-community,CoolCloud/taiga-back,CMLL/taiga-back,dycodedev/taiga-back,WALR/taiga-back,Rademade/taiga-back,bdang2012/taiga-back-casting,obimod/taiga-back,WALR/taiga-back,xdevelsistemas/taiga-back-community,rajiteh/taiga-back,dayatz/taiga-back,astronaut1712/taiga-back,WALR/taiga-back,seanchen/taiga-back,19kestier/taiga-back,frt-arch/taiga-back,CMLL/taiga-back,EvgeneOskin/taiga-back,19kestier/taiga-back,bdang2012/taiga-back-casting,forging2012/taiga-back,astagi/taiga-back,crr0004/taiga-back,coopsource/taiga-back,frt-arch/taiga-back,19kestier/taiga-back,Ev
geneOskin/taiga-back,dycodedev/taiga-back,CoolCloud/taiga-back,taigaio/taiga-back,gam-phon/taiga-back
|
from rest_framework import routers
- # Special router for actions.
- actions_router = routers.Route(url=r'^{prefix}/{methodname}{trailing_slash}$',
- mapping={'{httpmethod}': '{methodname}'},
- name='{basename}-{methodnamehyphen}',
- initkwargs={})
-
class DefaultRouter(routers.DefaultRouter):
+ pass
- routes = [
- routers.DefaultRouter.routes[0],
- actions_router,
- routers.DefaultRouter.routes[2],
- routers.DefaultRouter.routes[1]
- ]
__all__ = ["DefaultRouter"]
|
Remove old reimplementation of routes.
|
## Code Before:
from rest_framework import routers
# Special router for actions.
actions_router = routers.Route(url=r'^{prefix}/{methodname}{trailing_slash}$',
mapping={'{httpmethod}': '{methodname}'},
name='{basename}-{methodnamehyphen}',
initkwargs={})
class DefaultRouter(routers.DefaultRouter):
routes = [
routers.DefaultRouter.routes[0],
actions_router,
routers.DefaultRouter.routes[2],
routers.DefaultRouter.routes[1]
]
__all__ = ["DefaultRouter"]
## Instruction:
Remove old reimplementation of routes.
## Code After:
from rest_framework import routers
class DefaultRouter(routers.DefaultRouter):
pass
__all__ = ["DefaultRouter"]
|
from rest_framework import routers
- # Special router for actions.
- actions_router = routers.Route(url=r'^{prefix}/{methodname}{trailing_slash}$',
- mapping={'{httpmethod}': '{methodname}'},
- name='{basename}-{methodnamehyphen}',
- initkwargs={})
-
class DefaultRouter(routers.DefaultRouter):
+ pass
- routes = [
- routers.DefaultRouter.routes[0],
- actions_router,
- routers.DefaultRouter.routes[2],
- routers.DefaultRouter.routes[1]
- ]
__all__ = ["DefaultRouter"]
|
be70b1528f51385c8221b7337cdc8669f53fa1c6
|
textblob/decorators.py
|
textblob/decorators.py
|
'''Custom decorators.'''
from __future__ import absolute_import
from textblob.exceptions import MissingCorpusException
class cached_property(object):
'''A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Credit to Marcel Hellkamp, author of bottle.py.
'''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
def requires_nltk_corpus(func):
'''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
raise a MissingCorpusException.
'''
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except LookupError as err:
print(err)
raise MissingCorpusException()
return decorated
|
'''Custom decorators.'''
from __future__ import absolute_import
from functools import wraps
from textblob.exceptions import MissingCorpusException
class cached_property(object):
    '''A property that is only computed once per instance and then replaces
    itself with an ordinary attribute. Deleting the attribute resets the
    property.

    Credit to Marcel Hellkamp, author of bottle.py.
    '''

    def __init__(self, func):
        self.func = func
        self.__doc__ = getattr(func, '__doc__')

    def __get__(self, obj, cls):
        if obj is None:
            # Accessed on the class itself: expose the descriptor.
            return self
        # Compute once, then shadow the descriptor with a plain instance
        # attribute so subsequent lookups never reach __get__ again.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
def requires_nltk_corpus(func):
    '''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
    raise a MissingCorpusException.
    '''
    @wraps(func)
    def decorated(*args, **kwargs):
        try:
            result = func(*args, **kwargs)
        except LookupError as err:
            # NLTK signals a missing corpus with LookupError; surface it as
            # the library's own exception after echoing the details.
            print(err)
            raise MissingCorpusException()
        return result
    return decorated
|
Use wraps decorator for requires_nltk_corpus
|
Use wraps decorator for requires_nltk_corpus
|
Python
|
mit
|
jcalbert/TextBlob,freakynit/TextBlob,nvoron23/TextBlob,IrisSteenhout/TextBlob,adelq/TextBlob,beni55/TextBlob,jonmcoe/TextBlob,dipeshtech/TextBlob,sargam111/python,sloria/TextBlob,Windy-Ground/TextBlob,laugustyniak/TextBlob
|
'''Custom decorators.'''
from __future__ import absolute_import
+ from functools import wraps
from textblob.exceptions import MissingCorpusException
class cached_property(object):
'''A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Credit to Marcel Hellkamp, author of bottle.py.
'''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
def requires_nltk_corpus(func):
'''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
raise a MissingCorpusException.
'''
+ @wraps(func)
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except LookupError as err:
print(err)
raise MissingCorpusException()
return decorated
|
Use wraps decorator for requires_nltk_corpus
|
## Code Before:
'''Custom decorators.'''
from __future__ import absolute_import
from textblob.exceptions import MissingCorpusException
class cached_property(object):
'''A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Credit to Marcel Hellkamp, author of bottle.py.
'''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
def requires_nltk_corpus(func):
'''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
raise a MissingCorpusException.
'''
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except LookupError as err:
print(err)
raise MissingCorpusException()
return decorated
## Instruction:
Use wraps decorator for requires_nltk_corpus
## Code After:
'''Custom decorators.'''
from __future__ import absolute_import
from functools import wraps
from textblob.exceptions import MissingCorpusException
class cached_property(object):
'''A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Credit to Marcel Hellkamp, author of bottle.py.
'''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
def requires_nltk_corpus(func):
'''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
raise a MissingCorpusException.
'''
@wraps(func)
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except LookupError as err:
print(err)
raise MissingCorpusException()
return decorated
|
'''Custom decorators.'''
from __future__ import absolute_import
+ from functools import wraps
from textblob.exceptions import MissingCorpusException
class cached_property(object):
'''A property that is only computed once per instance and then replaces
itself with an ordinary attribute. Deleting the attribute resets the
property.
Credit to Marcel Hellkamp, author of bottle.py.
'''
def __init__(self, func):
self.__doc__ = getattr(func, '__doc__')
self.func = func
def __get__(self, obj, cls):
if obj is None:
return self
value = obj.__dict__[self.func.__name__] = self.func(obj)
return value
def requires_nltk_corpus(func):
'''Wraps a function that requires an NLTK corpus. If the corpus isn't found,
raise a MissingCorpusException.
'''
+ @wraps(func)
def decorated(*args, **kwargs):
try:
return func(*args, **kwargs)
except LookupError as err:
print(err)
raise MissingCorpusException()
return decorated
|
e386b013b4c0124c623bd99dcb1a1d01b6e6bd86
|
supriya/__init__.py
|
supriya/__init__.py
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
Add play, render and Say to toplevel namespace.
|
Add play, render and Say to toplevel namespace.
|
Python
|
mit
|
Pulgama/supriya,Pulgama/supriya,Pulgama/supriya,josiah-wolf-oberholtzer/supriya,Pulgama/supriya
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
- AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
+ AddAction,
+ Buffer,
+ BufferGroup,
+ Bus,
+ BusGroup,
+ Group,
+ Server,
+ Synth,
)
from supriya.tools.soundfiletools import ( # noqa
- HeaderFormat, SampleFormat, SoundFile,
+ HeaderFormat,
+ SampleFormat,
+ SoundFile,
+ play,
+ render,
)
from supriya.tools.synthdeftools import ( # noqa
- CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
+ CalculationRate,
+ DoneAction,
+ Envelope,
+ Range,
+ SynthDef,
+ SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
+ Assets,
+ Profiler,
- Assets, SupriyaConfiguration,
+ SupriyaConfiguration,
+ )
+ from supriya.tools.wrappertools import ( # noqa
+ Say,
)
from abjad.tools.topleveltools import ( # noqa
- graph, new,
+ graph,
+ new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
Add play, render and Say to toplevel namespace.
|
## Code Before:
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
## Instruction:
Add play, render and Say to toplevel namespace.
## Code After:
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
- AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
+ AddAction,
+ Buffer,
+ BufferGroup,
+ Bus,
+ BusGroup,
+ Group,
+ Server,
+ Synth,
)
from supriya.tools.soundfiletools import ( # noqa
- HeaderFormat, SampleFormat, SoundFile,
+ HeaderFormat,
+ SampleFormat,
+ SoundFile,
+ play,
+ render,
)
from supriya.tools.synthdeftools import ( # noqa
- CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
+ CalculationRate,
+ DoneAction,
+ Envelope,
+ Range,
+ SynthDef,
+ SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
+ Assets,
+ Profiler,
- Assets, SupriyaConfiguration,
? --------
+ SupriyaConfiguration,
+ )
+ from supriya.tools.wrappertools import ( # noqa
+ Say,
)
from abjad.tools.topleveltools import ( # noqa
- graph, new,
? -----
+ graph,
+ new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
a99378deee9a802bf107d11e79d2df2f77481495
|
silver/tests/spec/test_plan.py
|
silver/tests/spec/test_plan.py
|
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
|
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
    """Spec tests for the Plan API."""

    def setUp(self):
        # Fresh test client per test method.
        self.client = Client()

    def test_create_plan(self):
        # Placeholder: the real request below was commented out because it
        # was failing; re-enable once the /api/plans endpoint works.
        assert True
        # response = self.client.put('/api/plans', json.dumps({
        #     'name': 'Hydrogen',
        #     'interval': 'month',
        #     'interval_count': 1,
        #     'amount': 150,
        #     'currency': 'USD',
        #     'trial_period_days': 15,
        #     'metered_features': [
        #         {
        #             'name': '100k PageViews',
        #             'price_per_unit': 10,
        #             'included_units': 5
        #         }
        #     ],
        #     'due_days': 10,
        #     'generate_after': 86400
        # }), content_type='application/json')
        # plan = Plan.objects.filter(name='Hydrogen')
        # self.assertEqual(plan.count(), 1)
        # self.assertEqual(response.status_code, 201)
|
Comment out the failing Plan test
|
Comment out the failing Plan test
|
Python
|
apache-2.0
|
PressLabs/silver,PressLabs/silver,PressLabs/silver
|
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
+ assert True
- response = self.client.put('/api/plans', json.dumps({
+ # response = self.client.put('/api/plans', json.dumps({
- 'name': 'Hydrogen',
+ # 'name': 'Hydrogen',
- 'interval': 'month',
+ # 'interval': 'month',
- 'interval_count': 1,
+ # 'interval_count': 1,
- 'amount': 150,
+ # 'amount': 150,
- 'currency': 'USD',
+ # 'currency': 'USD',
- 'trial_period_days': 15,
+ # 'trial_period_days': 15,
- 'metered_features': [
+ # 'metered_features': [
- {
+ # {
- 'name': '100k PageViews',
+ # 'name': '100k PageViews',
- 'price_per_unit': 10,
+ # 'price_per_unit': 10,
- 'included_units': 5
+ # 'included_units': 5
- }
+ # }
- ],
+ # ],
- 'due_days': 10,
+ # 'due_days': 10,
- 'generate_after': 86400
+ # 'generate_after': 86400
- }), content_type='application/json')
+ # }), content_type='application/json')
- plan = Plan.objects.filter(name='Hydrogen')
+ # plan = Plan.objects.filter(name='Hydrogen')
- self.assertEqual(plan.count(), 1)
+ # self.assertEqual(plan.count(), 1)
- self.assertEqual(response.status_code, 201)
+ # self.assertEqual(response.status_code, 201)
|
Comment out the failing Plan test
|
## Code Before:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
response = self.client.put('/api/plans', json.dumps({
'name': 'Hydrogen',
'interval': 'month',
'interval_count': 1,
'amount': 150,
'currency': 'USD',
'trial_period_days': 15,
'metered_features': [
{
'name': '100k PageViews',
'price_per_unit': 10,
'included_units': 5
}
],
'due_days': 10,
'generate_after': 86400
}), content_type='application/json')
plan = Plan.objects.filter(name='Hydrogen')
self.assertEqual(plan.count(), 1)
self.assertEqual(response.status_code, 201)
## Instruction:
Comment out the failing Plan test
## Code After:
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
assert True
# response = self.client.put('/api/plans', json.dumps({
# 'name': 'Hydrogen',
# 'interval': 'month',
# 'interval_count': 1,
# 'amount': 150,
# 'currency': 'USD',
# 'trial_period_days': 15,
# 'metered_features': [
# {
# 'name': '100k PageViews',
# 'price_per_unit': 10,
# 'included_units': 5
# }
# ],
# 'due_days': 10,
# 'generate_after': 86400
# }), content_type='application/json')
# plan = Plan.objects.filter(name='Hydrogen')
# self.assertEqual(plan.count(), 1)
# self.assertEqual(response.status_code, 201)
|
import json
from silver.models import Plan
from django.test.client import Client
from django.test import TestCase
class PlansSpecificationTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_create_plan(self):
+ assert True
- response = self.client.put('/api/plans', json.dumps({
+ # response = self.client.put('/api/plans', json.dumps({
? ++
- 'name': 'Hydrogen',
+ # 'name': 'Hydrogen',
? ++
- 'interval': 'month',
+ # 'interval': 'month',
? ++
- 'interval_count': 1,
+ # 'interval_count': 1,
? ++
- 'amount': 150,
+ # 'amount': 150,
? ++
- 'currency': 'USD',
+ # 'currency': 'USD',
? ++
- 'trial_period_days': 15,
+ # 'trial_period_days': 15,
? ++
- 'metered_features': [
+ # 'metered_features': [
? ++
- {
+ # {
? ++
- 'name': '100k PageViews',
+ # 'name': '100k PageViews',
? ++
- 'price_per_unit': 10,
+ # 'price_per_unit': 10,
? ++
- 'included_units': 5
+ # 'included_units': 5
? ++
- }
+ # }
? ++
- ],
+ # ],
? ++
- 'due_days': 10,
+ # 'due_days': 10,
? ++
- 'generate_after': 86400
+ # 'generate_after': 86400
? ++
- }), content_type='application/json')
+ # }), content_type='application/json')
? ++
- plan = Plan.objects.filter(name='Hydrogen')
+ # plan = Plan.objects.filter(name='Hydrogen')
? ++
- self.assertEqual(plan.count(), 1)
+ # self.assertEqual(plan.count(), 1)
? ++
- self.assertEqual(response.status_code, 201)
+ # self.assertEqual(response.status_code, 201)
? ++
|
3a5dc4332e7f13119563e2190e6ef7d66b464054
|
tests/test_utils.py
|
tests/test_utils.py
|
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
|
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
Add knot vector normalization test
|
Add knot vector normalization test
|
Python
|
mit
|
orbingol/NURBS-Python,orbingol/NURBS-Python
|
from geomdl import utilities
- def test_autogen_knotvector():
+ def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
- def test_check_knotvector():
+ def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
- assert check_result == True
+ assert check_result
+ def test_normalize_knot_vector():
+ input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
+ output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
+ to_check = utilities.normalize_knot_vector(input_kv)
+ assert to_check == output_kv
+
|
Add knot vector normalization test
|
## Code Before:
from geomdl import utilities
def test_autogen_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knotvector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result == True
## Instruction:
Add knot vector normalization test
## Code After:
from geomdl import utilities
def test_autogen_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
def test_check_knot_vector():
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
assert check_result
def test_normalize_knot_vector():
input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
to_check = utilities.normalize_knot_vector(input_kv)
assert to_check == output_kv
|
from geomdl import utilities
- def test_autogen_knotvector():
+ def test_autogen_knot_vector():
? +
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
result = [0.0, 0.0, 0.0, 0.0, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.0, 1.0, 1.0, 1.0]
assert autogen_kv == result
- def test_check_knotvector():
+ def test_check_knot_vector():
? +
degree = 4
num_ctrlpts = 12
autogen_kv = utilities.generate_knot_vector(degree, num_ctrlpts)
check_result = utilities.check_knot_vector(degree=degree, control_points_size=num_ctrlpts, knot_vector=autogen_kv)
- assert check_result == True
? --------
+ assert check_result
+
+ def test_normalize_knot_vector():
+ input_kv = (-5, -5, -3, -2, 2, 3, 5, 5)
+ output_kv = [0.0, 0.0, 0.2, 0.3, 0.7, 0.8, 1.0, 1.0]
+ to_check = utilities.normalize_knot_vector(input_kv)
+ assert to_check == output_kv
|
12f1024d559c300c7c04256362da78ec8d3a647b
|
data/models.py
|
data/models.py
|
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
|
import numpy
import ast
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
@classmethod
def get_data(cls):
data = DataPoint.objects.filter(band_gap__isnull=False,
exact_name__isnull=False,
decay_feature__isnull=False)
M = len(data)
HOMO = numpy.zeros((M, 1))
LUMO = numpy.zeros((M, 1))
GAP = numpy.zeros((M, 1))
vectors = []
for i, x in enumerate(data):
HOMO[i] = x.homo
LUMO[i] = x.lumo
GAP[i] = x.band_gap
vectors.append(ast.literal_eval(x.decay_feature))
FEATURE = numpy.matrix(vectors)
return FEATURE, HOMO, LUMO, GAP
|
Add method on DataPoint to get numpy matrices with all the ML data
|
Add method on DataPoint to get numpy matrices with all the ML data
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
+ import numpy
+ import ast
+
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
+ @classmethod
+ def get_data(cls):
+ data = DataPoint.objects.filter(band_gap__isnull=False,
+ exact_name__isnull=False,
+ decay_feature__isnull=False)
+ M = len(data)
+ HOMO = numpy.zeros((M, 1))
+ LUMO = numpy.zeros((M, 1))
+ GAP = numpy.zeros((M, 1))
+ vectors = []
+ for i, x in enumerate(data):
+ HOMO[i] = x.homo
+ LUMO[i] = x.lumo
+ GAP[i] = x.band_gap
+ vectors.append(ast.literal_eval(x.decay_feature))
+ FEATURE = numpy.matrix(vectors)
+ return FEATURE, HOMO, LUMO, GAP
|
Add method on DataPoint to get numpy matrices with all the ML data
|
## Code Before:
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
## Instruction:
Add method on DataPoint to get numpy matrices with all the ML data
## Code After:
import numpy
import ast
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
@classmethod
def get_data(cls):
data = DataPoint.objects.filter(band_gap__isnull=False,
exact_name__isnull=False,
decay_feature__isnull=False)
M = len(data)
HOMO = numpy.zeros((M, 1))
LUMO = numpy.zeros((M, 1))
GAP = numpy.zeros((M, 1))
vectors = []
for i, x in enumerate(data):
HOMO[i] = x.homo
LUMO[i] = x.lumo
GAP[i] = x.band_gap
vectors.append(ast.literal_eval(x.decay_feature))
FEATURE = numpy.matrix(vectors)
return FEATURE, HOMO, LUMO, GAP
|
+ import numpy
+ import ast
+
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
+
+ @classmethod
+ def get_data(cls):
+ data = DataPoint.objects.filter(band_gap__isnull=False,
+ exact_name__isnull=False,
+ decay_feature__isnull=False)
+ M = len(data)
+ HOMO = numpy.zeros((M, 1))
+ LUMO = numpy.zeros((M, 1))
+ GAP = numpy.zeros((M, 1))
+ vectors = []
+ for i, x in enumerate(data):
+ HOMO[i] = x.homo
+ LUMO[i] = x.lumo
+ GAP[i] = x.band_gap
+ vectors.append(ast.literal_eval(x.decay_feature))
+ FEATURE = numpy.matrix(vectors)
+ return FEATURE, HOMO, LUMO, GAP
|
3ea9a14cdc4e19595ae8b14667d86ae42ba3d58c
|
astropy/wcs/tests/extension/test_extension.py
|
astropy/wcs/tests/extension/test_extension.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
env = os.environ.copy()
env['PYTHONPATH'] = str(tmpdir) + ':' + env.get('PYTHONPATH', '')
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
astropy_path = os.path.abspath(
os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
paths = [str(tmpdir), astropy_path]
if env.get('PYTHONPATH'):
paths.append(env.get('PYTHONPATH'))
env['PYTHONPATH'] = ':'.join(paths)
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
|
Make work when astropy isn't installed.
|
Make work when astropy isn't installed.
|
Python
|
bsd-3-clause
|
dhomeier/astropy,dhomeier/astropy,StuartLittlefair/astropy,joergdietrich/astropy,astropy/astropy,kelle/astropy,mhvk/astropy,stargaser/astropy,larrybradley/astropy,kelle/astropy,kelle/astropy,dhomeier/astropy,mhvk/astropy,joergdietrich/astropy,kelle/astropy,mhvk/astropy,astropy/astropy,saimn/astropy,MSeifert04/astropy,DougBurke/astropy,dhomeier/astropy,tbabej/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,AustereCuriosity/astropy,stargaser/astropy,funbaker/astropy,bsipocz/astropy,astropy/astropy,saimn/astropy,stargaser/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,lpsinger/astropy,larrybradley/astropy,funbaker/astropy,pllim/astropy,mhvk/astropy,joergdietrich/astropy,larrybradley/astropy,tbabej/astropy,bsipocz/astropy,mhvk/astropy,lpsinger/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,funbaker/astropy,astropy/astropy,joergdietrich/astropy,pllim/astropy,saimn/astropy,lpsinger/astropy,joergdietrich/astropy,kelle/astropy,tbabej/astropy,MSeifert04/astropy,pllim/astropy,funbaker/astropy,pllim/astropy,MSeifert04/astropy,MSeifert04/astropy,lpsinger/astropy,pllim/astropy,dhomeier/astropy,AustereCuriosity/astropy,astropy/astropy,stargaser/astropy,saimn/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,tbabej/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,tbabej/astropy,saimn/astropy,StuartLittlefair/astropy,AustereCuriosity/astropy,bsipocz/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,lpsinger/astropy,bsipocz/astropy
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
+ astropy_path = os.path.abspath(
+ os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
- env['PYTHONPATH'] = str(tmpdir) + ':' + env.get('PYTHONPATH', '')
+ paths = [str(tmpdir), astropy_path]
+ if env.get('PYTHONPATH'):
+ paths.append(env.get('PYTHONPATH'))
+ env['PYTHONPATH'] = ':'.join(paths)
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
|
Make work when astropy isn't installed.
|
## Code Before:
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
env = os.environ.copy()
env['PYTHONPATH'] = str(tmpdir) + ':' + env.get('PYTHONPATH', '')
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
## Instruction:
Make work when astropy isn't installed.
## Code After:
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
astropy_path = os.path.abspath(
os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
paths = [str(tmpdir), astropy_path]
if env.get('PYTHONPATH'):
paths.append(env.get('PYTHONPATH'))
env['PYTHONPATH'] = ':'.join(paths)
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import subprocess
import sys
def test_wcsapi_extension(tmpdir):
# Test that we can build a simple C extension with the astropy.wcs C API
setup_path = os.path.dirname(__file__)
+ astropy_path = os.path.abspath(
+ os.path.join(setup_path, '..', '..', '..', '..'))
env = os.environ.copy()
- env['PYTHONPATH'] = str(tmpdir) + ':' + env.get('PYTHONPATH', '')
+ paths = [str(tmpdir), astropy_path]
+ if env.get('PYTHONPATH'):
+ paths.append(env.get('PYTHONPATH'))
+ env['PYTHONPATH'] = ':'.join(paths)
# Build the extension
subprocess.check_call(
[sys.executable, 'setup.py',
'install', '--install-lib={0}'.format(tmpdir)],
cwd=setup_path,
env=env
)
code = """
import sys
import wcsapi_test
sys.exit(wcsapi_test.test())
"""
code = code.strip().replace('\n', '; ')
# Import and run the extension
subprocess.check_call(
[sys.executable, '-c', code],
env=env)
|
a65eaeaef60492bfc6319fb9c810155d62c1a3b3
|
luigi/tasks/export/ftp/go_annotations.py
|
luigi/tasks/export/ftp/go_annotations.py
|
import luigi
from tasks.config import db
from tasks.config import export
from tasks.utils.files import atomic_output
class GoAnnotation(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
export(db(), out)
|
import luigi
from tasks.config import db
from tasks.config import export
from rnacentral.export.ftp import go_terms
from tasks.utils.files import atomic_output
class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
go_terms.export(db(), out)
|
Update name and call correct export
|
Update name and call correct export
This now calls the correct export function. Additionally, the class name
is changed to reflect it does export.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
import luigi
from tasks.config import db
from tasks.config import export
+ from rnacentral.export.ftp import go_terms
+
from tasks.utils.files import atomic_output
- class GoAnnotation(luigi.Task):
+ class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
- export(db(), out)
+ go_terms.export(db(), out)
|
Update name and call correct export
|
## Code Before:
import luigi
from tasks.config import db
from tasks.config import export
from tasks.utils.files import atomic_output
class GoAnnotation(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
export(db(), out)
## Instruction:
Update name and call correct export
## Code After:
import luigi
from tasks.config import db
from tasks.config import export
from rnacentral.export.ftp import go_terms
from tasks.utils.files import atomic_output
class GoAnnotationExport(luigi.Task):
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
go_terms.export(db(), out)
|
import luigi
from tasks.config import db
from tasks.config import export
+ from rnacentral.export.ftp import go_terms
+
from tasks.utils.files import atomic_output
- class GoAnnotation(luigi.Task):
+ class GoAnnotationExport(luigi.Task):
? ++++++
def output(self):
return luigi.LocalTarget(export().go('rnacentral_annotations.tsv'))
def run(self):
with atomic_output(self.output()) as out:
- export(db(), out)
+ go_terms.export(db(), out)
? +++++++++
|
1b7a3f045bf7a23ef993d136b481f22258c4a778
|
wagtail/wagtailimages/rich_text.py
|
wagtail/wagtailimages/rich_text.py
|
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
except Image.DoesNotExist:
return "<img>"
|
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
except Image.DoesNotExist:
return "<img>"
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
|
Refactor try-catch block by limiting code in the try block
|
Refactor try-catch block by limiting code in the try block
Always good to know which line will raise an exception and limit the try block to that statement
|
Python
|
bsd-3-clause
|
Toshakins/wagtail,timorieber/wagtail,nrsimha/wagtail,kurtrwall/wagtail,timorieber/wagtail,FlipperPA/wagtail,inonit/wagtail,davecranwell/wagtail,nealtodd/wagtail,iansprice/wagtail,thenewguy/wagtail,nutztherookie/wagtail,jnns/wagtail,kaedroho/wagtail,iansprice/wagtail,serzans/wagtail,inonit/wagtail,kurtw/wagtail,mixxorz/wagtail,takeflight/wagtail,jnns/wagtail,serzans/wagtail,kaedroho/wagtail,Toshakins/wagtail,wagtail/wagtail,mixxorz/wagtail,thenewguy/wagtail,chrxr/wagtail,thenewguy/wagtail,JoshBarr/wagtail,quru/wagtail,FlipperPA/wagtail,timorieber/wagtail,nimasmi/wagtail,Tivix/wagtail,nutztherookie/wagtail,wagtail/wagtail,mixxorz/wagtail,Tivix/wagtail,thenewguy/wagtail,nealtodd/wagtail,hamsterbacke23/wagtail,kaedroho/wagtail,gasman/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,davecranwell/wagtail,nrsimha/wagtail,gogobook/wagtail,chrxr/wagtail,nimasmi/wagtail,zerolab/wagtail,timorieber/wagtail,mayapurmedia/wagtail,kurtw/wagtail,chrxr/wagtail,wagtail/wagtail,gasman/wagtail,nilnvoid/wagtail,wagtail/wagtail,nealtodd/wagtail,zerolab/wagtail,nimasmi/wagtail,jnns/wagtail,chrxr/wagtail,zerolab/wagtail,takeflight/wagtail,quru/wagtail,inonit/wagtail,nilnvoid/wagtail,rsalmaso/wagtail,mikedingjan/wagtail,nrsimha/wagtail,nutztherookie/wagtail,Tivix/wagtail,hanpama/wagtail,rsalmaso/wagtail,gogobook/wagtail,inonit/wagtail,mayapurmedia/wagtail,JoshBarr/wagtail,iansprice/wagtail,jnns/wagtail,Toshakins/wagtail,quru/wagtail,nilnvoid/wagtail,kurtrwall/wagtail,nutztherookie/wagtail,hamsterbacke23/wagtail,quru/wagtail,gasman/wagtail,FlipperPA/wagtail,kurtrwall/wagtail,hamsterbacke23/wagtail,serzans/wagtail,FlipperPA/wagtail,JoshBarr/wagtail,rsalmaso/wagtail,Toshakins/wagtail,mayapurmedia/wagtail,thenewguy/wagtail,mikedingjan/wagtail,hanpama/wagtail,mixxorz/wagtail,mikedingjan/wagtail,torchbox/wagtail,gasman/wagtail,zerolab/wagtail,mayapurmedia/wagtail,mixxorz/wagtail,gogobook/wagtail,rsalmaso/wagtail,hanpama/wagtail,serzans/wagtail,gogobook/wagtail,takeflight/wagtail,torchbox/wagtail
,torchbox/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,kaedroho/wagtail,JoshBarr/wagtail,rsalmaso/wagtail,gasman/wagtail,Tivix/wagtail,takeflight/wagtail,nrsimha/wagtail,iansprice/wagtail,hanpama/wagtail,nealtodd/wagtail,kurtrwall/wagtail,nimasmi/wagtail,nilnvoid/wagtail,kurtw/wagtail,davecranwell/wagtail,kurtw/wagtail,kaedroho/wagtail,davecranwell/wagtail
|
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
- image_format = get_image_format(attrs['format'])
-
- if for_editor:
- try:
- return image_format.image_to_editor_html(image, attrs['alt'])
- except:
- return ''
- else:
- return image_format.image_to_html(image, attrs['alt'])
-
except Image.DoesNotExist:
return "<img>"
+ image_format = get_image_format(attrs['format'])
+ if for_editor:
+ try:
+ return image_format.image_to_editor_html(image, attrs['alt'])
+ except:
+ return ''
+ else:
+ return image_format.image_to_html(image, attrs['alt'])
+
|
Refactor try-catch block by limiting code in the try block
|
## Code Before:
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
except Image.DoesNotExist:
return "<img>"
## Instruction:
Refactor try-catch block by limiting code in the try block
## Code After:
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
except Image.DoesNotExist:
return "<img>"
image_format = get_image_format(attrs['format'])
if for_editor:
try:
return image_format.image_to_editor_html(image, attrs['alt'])
except:
return ''
else:
return image_format.image_to_html(image, attrs['alt'])
|
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.formats import get_image_format
class ImageEmbedHandler(object):
"""
ImageEmbedHandler will be invoked whenever we encounter an element in HTML content
with an attribute of data-embedtype="image". The resulting element in the database
representation will be:
<embed embedtype="image" id="42" format="thumb" alt="some custom alt text">
"""
@staticmethod
def get_db_attributes(tag):
"""
Given a tag that we've identified as an image embed (because it has a
data-embedtype="image" attribute), return a dict of the attributes we should
have on the resulting <embed> element.
"""
return {
'id': tag['data-id'],
'format': tag['data-format'],
'alt': tag['data-alt'],
}
@staticmethod
def expand_db_attributes(attrs, for_editor):
"""
Given a dict of attributes from the <embed> tag, return the real HTML
representation.
"""
Image = get_image_model()
try:
image = Image.objects.get(id=attrs['id'])
- image_format = get_image_format(attrs['format'])
-
- if for_editor:
- try:
- return image_format.image_to_editor_html(image, attrs['alt'])
- except:
- return ''
- else:
- return image_format.image_to_html(image, attrs['alt'])
-
except Image.DoesNotExist:
return "<img>"
+
+ image_format = get_image_format(attrs['format'])
+ if for_editor:
+ try:
+ return image_format.image_to_editor_html(image, attrs['alt'])
+ except:
+ return ''
+ else:
+ return image_format.image_to_html(image, attrs['alt'])
|
5c0937993fdf34c96ccde3226c8e2a81efb381ce
|
troposphere/views/allocations.py
|
troposphere/views/allocations.py
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
Fix theme asset pathing in "no allocation"
|
Fix theme asset pathing in "no allocation"
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
- template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
+ template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
Fix theme asset pathing in "no allocation"
|
## Code Before:
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
## Instruction:
Fix theme asset pathing in "no allocation"
## Code After:
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
import logging
from django.conf import settings
from django.shortcuts import render, redirect, render_to_response
from django.template import RequestContext
logger = logging.getLogger(__name__)
def allocations(request):
"""
View that is shown if a community member has XSEDE/Globus access,
but is missing allocation-sources (no way to charge activity).
"""
# populate with values `site_metadata` in the future
template_params = {}
- template_params['THEME_URL'] = "/themes/%s" % settings.THEME_NAME
+ template_params['THEME_URL'] = "/assets/theme"
template_params['ORG_NAME'] = settings.ORG_NAME
if hasattr(settings, "BASE_URL"):
template_params['BASE_URL'] = settings.BASE_URL
response = render_to_response(
'allocations.html',
template_params,
context_instance=RequestContext(request)
)
return response
|
bed671bdd7dc221e55b5f60c4f9daca3c338a737
|
artists/views.py
|
artists/views.py
|
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
from similarities.models import UserSimilarity
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
|
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
from similarities.models import UserSimilarity, Similarity, update_similarities
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
def post_save(self, obj, created=False):
cumulative_similarity, _ = Similarity.objects.get_or_create(
other_artist=obj.other_artist,
cc_artist=obj.cc_artist,
)
update_similarities([cumulative_similarity])
|
Update cumulative similarities on save
|
Update cumulative similarities on save
|
Python
|
bsd-3-clause
|
FreeMusicNinja/api.freemusic.ninja
|
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
- from similarities.models import UserSimilarity
+ from similarities.models import UserSimilarity, Similarity, update_similarities
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
+ def post_save(self, obj, created=False):
+ cumulative_similarity, _ = Similarity.objects.get_or_create(
+ other_artist=obj.other_artist,
+ cc_artist=obj.cc_artist,
+ )
+ update_similarities([cumulative_similarity])
+
|
Update cumulative similarities on save
|
## Code Before:
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
from similarities.models import UserSimilarity
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
## Instruction:
Update cumulative similarities on save
## Code After:
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
from similarities.models import UserSimilarity, Similarity, update_similarities
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
def post_save(self, obj, created=False):
cumulative_similarity, _ = Similarity.objects.get_or_create(
other_artist=obj.other_artist,
cc_artist=obj.cc_artist,
)
update_similarities([cumulative_similarity])
|
from django.shortcuts import get_object_or_404
from rest_framework import permissions, viewsets
from similarities.utils import get_similar
from .models import Artist
- from similarities.models import UserSimilarity
+ from similarities.models import UserSimilarity, Similarity, update_similarities
from .serializers import ArtistSerializer, SimilaritySerializer
class ArtistViewSet(viewsets.ModelViewSet):
"""API endpoint that allows artists to be viewed or edited"""
queryset = Artist.objects.all()
serializer_class = ArtistSerializer
permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
def get_queryset(self):
name = self.request.GET.get('name', "")
if name:
qs = get_similar(name)
else:
qs = super().get_queryset()
return qs[:100]
class SimilarViewSet(viewsets.ModelViewSet):
queryset = UserSimilarity.objects.all()
serializer_class = SimilaritySerializer
permission_classes = (permissions.IsAuthenticated,)
http_method_names = ['get', 'post', 'put', 'delete']
filter_fields = ['cc_artist']
def get_queryset(self):
return super().get_queryset().filter(user=self.request.user)
def pre_save(self, obj):
obj.user = self.request.user
+
+ def post_save(self, obj, created=False):
+ cumulative_similarity, _ = Similarity.objects.get_or_create(
+ other_artist=obj.other_artist,
+ cc_artist=obj.cc_artist,
+ )
+ update_similarities([cumulative_similarity])
|
292b4843fdb0efbf3cc8d7c97aaa8abd2cd22a28
|
optimization/simple.py
|
optimization/simple.py
|
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
|
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
#m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
coef=[1,2]
var=[x1,x2]
s=[]
for c,v in zip(coef,var):
print(c,v)
s.append(c*v)
m.setObjective(sum(s),GRB.MAXIMIZE)
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
|
Use sum function to construct objective function.
|
Use sum function to construct objective function.
|
Python
|
apache-2.0
|
MiddelkoopT/CompOpt-2014-Fall,MiddelkoopT/CompOpt-2014-Fall
|
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
- m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
+ #m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
+ coef=[1,2]
+ var=[x1,x2]
+
+ s=[]
+ for c,v in zip(coef,var):
+ print(c,v)
+ s.append(c*v)
+ m.setObjective(sum(s),GRB.MAXIMIZE)
+
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
|
Use sum function to construct objective function.
|
## Code Before:
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
## Instruction:
Use sum function to construct objective function.
## Code After:
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
#m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
coef=[1,2]
var=[x1,x2]
s=[]
for c,v in zip(coef,var):
print(c,v)
s.append(c*v)
m.setObjective(sum(s),GRB.MAXIMIZE)
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
|
from gurobipy import *
m = Model("simple")
x1 = m.addVar(name="x1")
x2 = m.addVar(name="x2")
m.update()
print("x1:%s x2:%s" % (x1,x2))
- m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
+ #m.setObjective(x1 + 2*x2, GRB.MAXIMIZE)
? +
+ coef=[1,2]
+ var=[x1,x2]
+
+ s=[]
+ for c,v in zip(coef,var):
+ print(c,v)
+ s.append(c*v)
+ m.setObjective(sum(s),GRB.MAXIMIZE)
+
m.addConstr(x1 + x2 <= 40, "C1")
m.addConstr(2*x1 + x2 <= 60, "C2")
m.optimize()
print("Solution: %f" % (m.objVal,))
for v in m.getVars():
print("%s:%f" % (v.varName, v.x))
|
24c8122db0f38a1f798461a23d08535e4e6781d5
|
photo/idxitem.py
|
photo/idxitem.py
|
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
def as_dict(self):
return dict(self.__dict__)
|
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
self.tags = set(self.tags)
def as_dict(self):
d = self.__dict__.copy()
d['tags'] = list(d['tags'])
return d
|
Convert tags to a set on init and back to a list on writing.
|
Convert tags to a set on init and back to a list on writing.
|
Python
|
apache-2.0
|
RKrahl/photo-tools
|
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
+ self.tags = set(self.tags)
def as_dict(self):
- return dict(self.__dict__)
+ d = self.__dict__.copy()
+ d['tags'] = list(d['tags'])
+ return d
|
Convert tags to a set on init and back to a list on writing.
|
## Code Before:
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
def as_dict(self):
return dict(self.__dict__)
## Instruction:
Convert tags to a set on init and back to a list on writing.
## Code After:
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
self.tags = set(self.tags)
def as_dict(self):
d = self.__dict__.copy()
d['tags'] = list(d['tags'])
return d
|
import hashlib
def _md5file(fname):
"""Calculate the md5 hash for a file.
"""
m = hashlib.md5()
chunksize = 8192
with open(fname, 'rb') as f:
while True:
chunk = f.read(chunksize)
if not chunk:
break
m.update(chunk)
return m.hexdigest()
class IdxItem(object):
def __init__(self, filename=None, data=None):
self.filename = None
self.tags = []
if data is not None:
self.__dict__.update(data)
elif filename is not None:
self.filename = filename
self.md5 = _md5file(filename)
+ self.tags = set(self.tags)
def as_dict(self):
- return dict(self.__dict__)
+ d = self.__dict__.copy()
+ d['tags'] = list(d['tags'])
+ return d
|
eb03de241f3d47173381ee22f85b5cdf5d9c1fb4
|
examples/monitoring/worker.py
|
examples/monitoring/worker.py
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception as e:
worker.logger.error('Cannot connect to graphite')
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception:
worker.logger.error('Cannot connect to graphite')
|
Fix flake8 issues in examples
|
Fix flake8 issues in examples
|
Python
|
apache-2.0
|
aioworkers/aioworkers
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
- connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
+ connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
- await connection.send('workers.worker', value, time.time())
+ await connection.send('workers.worker', value, time.time())
- await connection.close()
+ await connection.close()
- except Exception as e:
+ except Exception:
- worker.logger.error('Cannot connect to graphite')
+ worker.logger.error('Cannot connect to graphite')
|
Fix flake8 issues in examples
|
## Code Before:
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception as e:
worker.logger.error('Cannot connect to graphite')
## Instruction:
Fix flake8 issues in examples
## Code After:
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
await connection.send('workers.worker', value, time.time())
await connection.close()
except Exception:
worker.logger.error('Cannot connect to graphite')
|
import random
import time
from os import getenv
from aiographite.aiographite import connect
from aiographite.protocol import PlaintextProtocol
GRAPHITE_HOST = getenv('GRAPHITE_HOST', 'localhost')
async def run(worker, *args, **kwargs):
value = random.randrange(10)
try:
- connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
? ^
+ connection = await connect(GRAPHITE_HOST, 2003, PlaintextProtocol(), loop=worker.loop)
? ^^^^
- await connection.send('workers.worker', value, time.time())
? ^
+ await connection.send('workers.worker', value, time.time())
? ^^^^
- await connection.close()
? ^
+ await connection.close()
? ^^^^
- except Exception as e:
? -----
+ except Exception:
- worker.logger.error('Cannot connect to graphite')
? ^
+ worker.logger.error('Cannot connect to graphite')
? ^^^^
|
e4e38ecd09b4c96e5b801b1bc9f7a943934c6485
|
cobertura_clover_transform/converter.py
|
cobertura_clover_transform/converter.py
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
args = parser.parse_args()
converted = convert(args.coverage_xml)
print(converted)
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
parser.add_argument('-o', '--output', required=False)
args = parser.parse_args()
converted = convert(args.coverage_xml)
if args.output:
with open(args.output, 'w') as out:
out.write(converted)
else:
print(converted)
|
Add support for output to a file
|
Add support for output to a file
|
Python
|
mit
|
cwacek/cobertura-clover-transform
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
+ parser.add_argument('-o', '--output', required=False)
args = parser.parse_args()
converted = convert(args.coverage_xml)
+ if args.output:
+ with open(args.output, 'w') as out:
+ out.write(converted)
+ else:
- print(converted)
+ print(converted)
|
Add support for output to a file
|
## Code Before:
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
args = parser.parse_args()
converted = convert(args.coverage_xml)
print(converted)
## Instruction:
Add support for output to a file
## Code After:
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
parser.add_argument('-o', '--output', required=False)
args = parser.parse_args()
converted = convert(args.coverage_xml)
if args.output:
with open(args.output, 'w') as out:
out.write(converted)
else:
print(converted)
|
import lxml.etree as ET
import argparse
import pkg_resources
def convert(inxml):
dom = ET.parse(inxml)
xslt = ET.parse(pkg_resources.resource_stream('cobertura_clover_transform',
'transform.xslt'))
transform = ET.XSLT(xslt)
newdom = transform(dom)
return ET.tostring(newdom, pretty_print=True)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('coverage_xml')
+ parser.add_argument('-o', '--output', required=False)
args = parser.parse_args()
converted = convert(args.coverage_xml)
+ if args.output:
+ with open(args.output, 'w') as out:
+ out.write(converted)
+ else:
- print(converted)
+ print(converted)
? ++++
|
e7f923488ebf589aa78f7dc37792ffba3fffd2a3
|
pyinfra_kubernetes/defaults.py
|
pyinfra_kubernetes/defaults.py
|
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
'kubernetes_master_url': 'http://127.0.0.1',
}
|
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
# API server URL for master components (controller-manager, scheduler)
'kubernetes_master_url': 'http://127.0.0.1',
}
|
Update comment about default data.
|
Update comment about default data.
|
Python
|
mit
|
EDITD/pyinfra-kubernetes,EDITD/pyinfra-kubernetes
|
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
+
+ # API server URL for master components (controller-manager, scheduler)
'kubernetes_master_url': 'http://127.0.0.1',
}
|
Update comment about default data.
|
## Code Before:
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
'kubernetes_master_url': 'http://127.0.0.1',
}
## Instruction:
Update comment about default data.
## Code After:
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
# API server URL for master components (controller-manager, scheduler)
'kubernetes_master_url': 'http://127.0.0.1',
}
|
DEFAULTS = {
# Install
'kubernetes_version': None, # must be provided
'kubernetes_download_base_url': 'https://dl.k8s.io',
'kubernetes_install_dir': '/usr/local/kubernetes',
'kubernetes_bin_dir': '/usr/local/bin',
'kubernetes_conf_dir': '/etc/kubernetes',
# Config
'kubernetes_service_cidr': None, # must be provided
+
+ # API server URL for master components (controller-manager, scheduler)
'kubernetes_master_url': 'http://127.0.0.1',
}
|
570a1468796c6afdcbd77052227d9a155601e710
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return app
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
Add word class converter to URL map
|
Add word class converter to URL map
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
+ from app.converters import WordClassConverter
+ app.url_map.converters["word_class"] = WordClassConverter
+
return app
|
Add word class converter to URL map
|
## Code Before:
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return app
## Instruction:
Add word class converter to URL map
## Code After:
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
+ from app.converters import WordClassConverter
+ app.url_map.converters["word_class"] = WordClassConverter
+
return app
|
cef6f3cce4a942bea53d6bae639dcd48d680d05a
|
gpytorch/means/linear_mean.py
|
gpytorch/means/linear_mean.py
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
Fix LinearMean bias when bias=False
|
Fix LinearMean bias when bias=False
|
Python
|
mit
|
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
+ else:
+ self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
Fix LinearMean bias when bias=False
|
## Code Before:
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
## Instruction:
Fix LinearMean bias when bias=False
## Code After:
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
+ else:
+ self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
067b557258a85945635a880ced65454cfa2b61af
|
supermega/tests/test_session.py
|
supermega/tests/test_session.py
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
Add test for key derivation
|
Add test for key derivation
|
Python
|
bsd-3-clause
|
lmb/Supermega
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
+
+ def test_key_derivation(self):
+ self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
+
|
Add test for key derivation
|
## Code Before:
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
## Instruction:
Add test for key derivation
## Code After:
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
def test_key_derivation(self):
self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
import unittest
import hashlib
from .. import Session
from .. import models
class TestSession(unittest.TestCase):
def setUp(self):
self.sess = Session()
def test_public_file_download(self):
url = 'https://mega.co.nz/#!2ctGgQAI!AkJMowjRiXVcSrRLn3d-e1vl47ZxZEK0CbrHGIKFY-E'
sha256 = '9431103cb989f2913cbc503767015ca22c0ae40942932186c59ffe6d6a69830d'
hash = hashlib.sha256()
def verify_hash(file, chunks):
for chunk in chunks:
hash.update(chunk)
self.assertEqual(hash.hexdigest(), sha256)
self.sess.download(verify_hash, url)
def test_ephemeral_account(self):
sess = self.sess
user = models.User(sess)
user.ephemeral()
sess.init_datastore()
+
+ def test_key_derivation(self):
+ self.assertEqual(models.User.derive_key("password"), 'd\x039r^n\xbd\x13\xa2_\x00R\x12\x9f|\xb1')
|
cc0f33a51f3b13cec191a7a97d20af95082e38db
|
tests/test_utils.py
|
tests/test_utils.py
|
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
|
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
from nose.tools import assert_equals
from testfixtures import compare
import texcavator.utils as utils
def test_json_error_message():
response = utils.json_error_message('test')
compare(response.content, '{"status": "error", "msg": "test"}')
assert_equals(response.status_code, 200)
|
Add test for utility function json_error_message()
|
Add test for utility function json_error_message()
|
Python
|
apache-2.0
|
UUDigitalHumanitieslab/texcavator,msassmann/texcavator,msassmann/texcavator,msassmann/texcavator,UUDigitalHumanitieslab/texcavator,UUDigitalHumanitieslab/texcavator,msassmann/texcavator
|
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
+ from nose.tools import assert_equals
+ from testfixtures import compare
+
+ import texcavator.utils as utils
+
+
+ def test_json_error_message():
+ response = utils.json_error_message('test')
+
+ compare(response.content, '{"status": "error", "msg": "test"}')
+ assert_equals(response.status_code, 200)
+
|
Add test for utility function json_error_message()
|
## Code Before:
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
## Instruction:
Add test for utility function json_error_message()
## Code After:
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
from nose.tools import assert_equals
from testfixtures import compare
import texcavator.utils as utils
def test_json_error_message():
response = utils.json_error_message('test')
compare(response.content, '{"status": "error", "msg": "test"}')
assert_equals(response.status_code, 200)
|
"""Tests for the Texcavator utility functions"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "texcavator.settings")
+
+ from nose.tools import assert_equals
+ from testfixtures import compare
+
+ import texcavator.utils as utils
+
+
+ def test_json_error_message():
+ response = utils.json_error_message('test')
+
+ compare(response.content, '{"status": "error", "msg": "test"}')
+ assert_equals(response.status_code, 200)
|
1d6fa0521b0fbba48ddbc231614b7074a63488c2
|
tests/utils.py
|
tests/utils.py
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
Remove debug messages from import.
|
Remove debug messages from import.
|
Python
|
mpl-2.0
|
EsriOceans/btm
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
- print "importing " + abs_path
sys.path.insert(0, abs_path)
-
|
Remove debug messages from import.
|
## Code Before:
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
print "importing " + abs_path
sys.path.insert(0, abs_path)
## Instruction:
Remove debug messages from import.
## Code After:
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
sys.path.insert(0, abs_path)
|
import os
import sys
from config import *
def addLocalPaths(paths):
for path_part in paths:
base_path = os.path.join(local_path, path_part)
abs_path = os.path.abspath(base_path)
- print "importing " + abs_path
sys.path.insert(0, abs_path)
-
|
8281a2f614d686ba7c8c14e108d8415a43c80602
|
tests/blueprints/test_bp_features.py
|
tests/blueprints/test_bp_features.py
|
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
|
from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
|
Improve test_bustimes speed by using mock
|
Improve test_bustimes speed by using mock
|
Python
|
mit
|
MarauderXtreme/sipa,agdsn/sipa,agdsn/sipa,agdsn/sipa,lukasjuhrich/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,agdsn/sipa,lukasjuhrich/sipa,MarauderXtreme/sipa,lukasjuhrich/sipa
|
+ from unittest.mock import patch, MagicMock
+
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
+ mock = MagicMock()
+ with patch('sipa.blueprints.features.get_bustimes', mock):
- self.assert200(self.client.get(url_for('features.bustimes')))
+ resp = self.client.get(url_for('features.bustimes'))
+
+ self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
+ self.assertTrue(mock.called)
|
Improve test_bustimes speed by using mock
|
## Code Before:
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
self.assert200(self.client.get(url_for('features.bustimes')))
self.assertTemplateUsed("bustimes.html")
## Instruction:
Improve test_bustimes speed by using mock
## Code After:
from unittest.mock import patch, MagicMock
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
mock = MagicMock()
with patch('sipa.blueprints.features.get_bustimes', mock):
resp = self.client.get(url_for('features.bustimes'))
self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
self.assertTrue(mock.called)
|
+ from unittest.mock import patch, MagicMock
+
from flask import url_for
from tests.base import SampleFrontendTestBase
class BpFeaturesTestCase(SampleFrontendTestBase):
def test_bustimes_reachable(self):
+ mock = MagicMock()
+ with patch('sipa.blueprints.features.get_bustimes', mock):
- self.assert200(self.client.get(url_for('features.bustimes')))
? ^^^^^^^^^^^^^^ -
+ resp = self.client.get(url_for('features.bustimes'))
? ++++++ ^^^^
+
+ self.assert200(resp)
self.assertTemplateUsed("bustimes.html")
+ self.assertTrue(mock.called)
|
22ab67a2c5a3bf3f7d1696a35b5fe029b848d63e
|
virtool/models.py
|
virtool/models.py
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
Use serial integer IDs for SQL records
|
Use serial integer IDs for SQL records
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy import Column, String
+ from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
- id = Column(String, primary_key=True)
+ id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
Use serial integer IDs for SQL records
|
## Code Before:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
## Instruction:
Use serial integer IDs for SQL records
## Code After:
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy import Column, String
+ from sqlalchemy import Column, String, Sequence, Integer
? +++++++++++++++++++
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
- id = Column(String, primary_key=True)
+ id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
5b6823ec19185ed5b413d1c01d3afeb5b1716778
|
taca/server_status/cli.py
|
taca/server_status/cli.py
|
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
if not CONFIG.get('server_status', ''):
logging.warning("Configuration missing required entries: server_status")
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
|
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
if not CONFIG.get('server_status', ''):
logging.warning("Configuration missing required entries: server_status")
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
|
Move warning about missing config entry to relevant subcommand
|
Move warning about missing config entry to relevant subcommand
|
Python
|
mit
|
SciLifeLab/TACA,SciLifeLab/TACA,SciLifeLab/TACA
|
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
- if not CONFIG.get('server_status', ''):
- logging.warning("Configuration missing required entries: server_status")
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
+ if not CONFIG.get('server_status', ''):
+ logging.warning("Configuration missing required entries: server_status")
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
|
Move warning about missing config entry to relevant subcommand
|
## Code Before:
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
if not CONFIG.get('server_status', ''):
logging.warning("Configuration missing required entries: server_status")
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
## Instruction:
Move warning about missing config entry to relevant subcommand
## Code After:
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
if not CONFIG.get('server_status', ''):
logging.warning("Configuration missing required entries: server_status")
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
|
import click
import logging
from taca.server_status import server_status as status
from taca.utils.config import CONFIG
from taca.server_status import cronjobs as cj # to avoid similar names with command, otherwise exception
@click.group(name='server_status')
def server_status():
""" Monitor server status """
- if not CONFIG.get('server_status', ''):
- logging.warning("Configuration missing required entries: server_status")
# server status subcommands
@server_status.command()
@click.option('--statusdb', is_flag=True, help="Update the statusdb")
def nases(statusdb):
""" Checks the available space on all the nases
"""
+ if not CONFIG.get('server_status', ''):
+ logging.warning("Configuration missing required entries: server_status")
disk_space = status.get_nases_disk_space()
if statusdb:
status.update_status_db(disk_space, server_type='nas')
@server_status.command()
def cronjobs():
""" Monitors cronjobs and updates statusdb
"""
cj.update_cronjob_db()
|
30b4003b22ab12bcc83013c63903dad7e36a5374
|
webserver/codemanagement/urls.py
|
webserver/codemanagement/urls.py
|
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='list_submissions'),
url(r'^repo/', include('greta.repo_view_urls')),
)
|
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='submit'),
url(r'^repo/', include('greta.repo_view_urls')),
)
|
Fix URL name for submission page
|
Fix URL name for submission page
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
- name='list_submissions'),
+ name='submit'),
url(r'^repo/', include('greta.repo_view_urls')),
)
|
Fix URL name for submission page
|
## Code Before:
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='list_submissions'),
url(r'^repo/', include('greta.repo_view_urls')),
)
## Instruction:
Fix URL name for submission page
## Code After:
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
name='submit'),
url(r'^repo/', include('greta.repo_view_urls')),
)
|
from django.conf.urls.defaults import patterns, url, include
from piston.resource import Resource
from .views import (CreateRepoView, UpdatePasswordView,
ListSubmissionView, SubmitView)
from .api_handlers import RepoAuthHandler, RepoPathHandler, RepoTagListHandler
urlpatterns = patterns(
"",
url(r'^api/repo/auth/', Resource(handler=RepoAuthHandler)),
url(r'^api/repo/path/', Resource(handler=RepoPathHandler)),
url(r'^api/repo/tags/', Resource(handler=RepoTagListHandler)),
url(r'^competition/(?P<comp_slug>[\w-]+)/create-repo/$',
CreateRepoView.as_view(),
name='create_repo'),
url(r'^competition/(?P<comp_slug>[\w-]+)/update-password/$',
UpdatePasswordView.as_view(),
name='update_repo_password'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submissions/$',
ListSubmissionView.as_view(),
name='list_submissions'),
url(r'^competition/(?P<comp_slug>[\w-]+)/submit/(?P<sha>[a-f0-9]{40})/$',
SubmitView.as_view(),
- name='list_submissions'),
? ----- ^^^^^^
+ name='submit'),
? ^
url(r'^repo/', include('greta.repo_view_urls')),
)
|
3642fefbb1d4b5f2aeead54135ac37f0a5b635cd
|
tests/general.py
|
tests/general.py
|
import unittest
from app import create_app, configure_settings, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
configure_settings(self.app)
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
|
import unittest
from app import create_app, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
# Temporary. Will be removed once default settings has been set up.
self.app.config['posts_per_page'] = '10'
self.app.config['blog_name'] = 'flask-blogger'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
|
Add placeholder settings to unit tests
|
Add placeholder settings to unit tests
|
Python
|
mit
|
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
|
import unittest
- from app import create_app, configure_settings, db
+ from app import create_app, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
- configure_settings(self.app)
+
+ # Temporary. Will be removed once default settings has been set up.
+ self.app.config['posts_per_page'] = '10'
+ self.app.config['blog_name'] = 'flask-blogger'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
|
Add placeholder settings to unit tests
|
## Code Before:
import unittest
from app import create_app, configure_settings, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
configure_settings(self.app)
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
## Instruction:
Add placeholder settings to unit tests
## Code After:
import unittest
from app import create_app, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
# Temporary. Will be removed once default settings has been set up.
self.app.config['posts_per_page'] = '10'
self.app.config['blog_name'] = 'flask-blogger'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
|
import unittest
- from app import create_app, configure_settings, db
? --------------------
+ from app import create_app, db
class AppTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
- configure_settings(self.app)
+
+ # Temporary. Will be removed once default settings has been set up.
+ self.app.config['posts_per_page'] = '10'
+ self.app.config['blog_name'] = 'flask-blogger'
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class DummyPostData(dict):
def getlist(self, key):
v = self[key]
if not isinstance(v, (list, tuple)):
v = [v]
return v
|
8528f21397672b5719fcf4edecd8efa3a1eec60a
|
cellardoor/serializers/json_serializer.py
|
cellardoor/serializers/json_serializer.py
|
import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
Use more reliable method of detecting iterables
|
Use more reliable method of detecting iterables
|
Python
|
mit
|
cooper-software/cellardoor
|
import re
import json
from datetime import datetime
+ import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
+ if isinstance(obj, collections.Iterable):
- try:
- iterable = iter(obj)
- except TypeError:
- pass
- else:
- return list(iterable)
+ return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
Use more reliable method of detecting iterables
|
## Code Before:
import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
## Instruction:
Use more reliable method of detecting iterables
## Code After:
import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
import re
import json
from datetime import datetime
+ import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
+ if isinstance(obj, collections.Iterable):
- try:
- iterable = iter(obj)
- except TypeError:
- pass
- else:
- return list(iterable)
? ^^^^^ ^^
+ return list(obj)
? ^ ^
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
d6b3f4e0798f430761f51529ea61c368e1ce610a
|
utest/contrib/testrunner/test_pybot_arguments_validation.py
|
utest/contrib/testrunner/test_pybot_arguments_validation.py
|
import unittest
import robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
import unittest
import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
Fix unit test for when robotframework is not installed.
|
Fix unit test for when robotframework is not installed.
|
Python
|
apache-2.0
|
HelioGuilherme66/RIDE,robotframework/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE,HelioGuilherme66/RIDE,robotframework/RIDE
|
import unittest
- import robot.errors
+ import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
- self.assertRaisesRegex(robot.errors.DataError,
+ self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
- self.assertRaisesRegexp(robot.errors.DataError,
+ self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
Fix unit test for when robotframework is not installed.
|
## Code Before:
import unittest
import robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix unit test for when robotframework is not installed.
## Code After:
import unittest
import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
import unittest
- import robot.errors
+ import robotide.lib.robot.errors
from robotide.contrib.testrunner.runprofiles import PybotProfile
class TestPybotArgumentsValidation(unittest.TestCase):
def setUp(self):
self._profile = PybotProfile(lambda:0)
@unittest.expectedFailure # No more DataError, better argument detection
def test_invalid_argument(self):
try:
- self.assertRaisesRegex(robot.errors.DataError,
+ self.assertRaisesRegex(robotide.lib.robot.errors.DataError,
? +++++++++++++
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
except AttributeError: # Python2
- self.assertRaisesRegexp(robot.errors.DataError,
+ self.assertRaisesRegexp(robotide.lib.robot.errors.DataError,
? +++++++++++++
'option --invalidargument not recognized',
self._profile._get_invalid_message,
'--invalidargument')
def test_valid_argument_short(self):
self._working_arguments('-T')
def _working_arguments(self, args):
self.assertEqual(None, self._profile._get_invalid_message(args))
def test_valid_argument_long(self):
self._working_arguments('--timestampoutputs')
def test_valid_argument_with_value(self):
self._working_arguments('--log somelog.html')
def test_runfailed_argument_works(self):
self._working_arguments('--runfailed output.xml')
if __name__ == '__main__':
unittest.main()
|
d37f9646b13df624f04050a63d34b3d33e9e6e9e
|
python/matasano/set1/c8.py
|
python/matasano/set1/c8.py
|
from matasano.util.converters import hex_to_bytestr
from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
for line in chal_file:
ct = hex_to_bytestr(line[:-1])
for i in range(0, len(ct), 16):
for j in range(i+16, len(ct), 16):
if ct[i:i+16] == ct[j:j+16]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
break
else:
continue
break
else:
continue
break
|
from matasano.util.converters import hex_to_bytestr
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
coll_count = {}
for idx, line in enumerate(chal_file):
count = 0
ct = line[:-1]
for i in range(0, len(ct), 32):
for j in range(i+32, len(ct), 32):
if ct[i:i+32] == ct[j:j+32]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
count += 1
coll_count[idx] = count
most_coll = max(coll_count, key=coll_count.get)
print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
|
Improve the code, return most collisions. Work on hex strings.
|
Improve the code, return most collisions. Work on hex strings.
|
Python
|
mit
|
TheLunchtimeAttack/matasano-challenges,TheLunchtimeAttack/matasano-challenges
|
from matasano.util.converters import hex_to_bytestr
- from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
+ coll_count = {}
- for line in chal_file:
+ for idx, line in enumerate(chal_file):
- ct = hex_to_bytestr(line[:-1])
+ count = 0
+ ct = line[:-1]
- for i in range(0, len(ct), 16):
+ for i in range(0, len(ct), 32):
- for j in range(i+16, len(ct), 16):
+ for j in range(i+32, len(ct), 32):
- if ct[i:i+16] == ct[j:j+16]:
+ if ct[i:i+32] == ct[j:j+32]:
- print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
+ print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
+ count += 1
+ coll_count[idx] = count
- break
- else:
- continue
- break
- else:
- continue
- break
+ most_coll = max(coll_count, key=coll_count.get)
+ print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
+
+
|
Improve the code, return most collisions. Work on hex strings.
|
## Code Before:
from matasano.util.converters import hex_to_bytestr
from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
for line in chal_file:
ct = hex_to_bytestr(line[:-1])
for i in range(0, len(ct), 16):
for j in range(i+16, len(ct), 16):
if ct[i:i+16] == ct[j:j+16]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
break
else:
continue
break
else:
continue
break
## Instruction:
Improve the code, return most collisions. Work on hex strings.
## Code After:
from matasano.util.converters import hex_to_bytestr
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
coll_count = {}
for idx, line in enumerate(chal_file):
count = 0
ct = line[:-1]
for i in range(0, len(ct), 32):
for j in range(i+32, len(ct), 32):
if ct[i:i+32] == ct[j:j+32]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
count += 1
coll_count[idx] = count
most_coll = max(coll_count, key=coll_count.get)
print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
|
from matasano.util.converters import hex_to_bytestr
- from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
+ coll_count = {}
- for line in chal_file:
+ for idx, line in enumerate(chal_file):
? +++++ ++++++++++ +
- ct = hex_to_bytestr(line[:-1])
+ count = 0
+ ct = line[:-1]
- for i in range(0, len(ct), 16):
? ^^
+ for i in range(0, len(ct), 32):
? ^^
- for j in range(i+16, len(ct), 16):
? ^^ ^^
+ for j in range(i+32, len(ct), 32):
? ^^ ^^
- if ct[i:i+16] == ct[j:j+16]:
? ^^ ^^
+ if ct[i:i+32] == ct[j:j+32]:
? ^^ ^^
- print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
? - ^^
+ print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
? ^^
+ count += 1
+ coll_count[idx] = count
- break
- else:
- continue
- break
- else:
- continue
- break
+
+ most_coll = max(coll_count, key=coll_count.get)
+ print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
+
|
9a6467688f567abc405a3fca6c4bfda7b6cd0351
|
FileWatcher.py
|
FileWatcher.py
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if event.src_path == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = filePath
self.callback = callback
self.eventHandler = MyEventHandler(filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
Handle filepaths in an OS independent manner.
|
Handle filepaths in an OS independent manner.
--CAR
|
Python
|
apache-2.0
|
BBN-Q/PyQLab,calebjordan/PyQLab,Plourde-Research-Lab/PyQLab,rmcgurrin/PyQLab
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
- if event.src_path == self.filePath:
+ if os.path.normpath(event.src_path) == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
- self.filePath = filePath
+ self.filePath = os.path.normpath(filePath)
self.callback = callback
- self.eventHandler = MyEventHandler(filePath, callback)
+ self.eventHandler = MyEventHandler(self.filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
Handle filepaths in an OS independent manner.
|
## Code Before:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if event.src_path == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = filePath
self.callback = callback
self.eventHandler = MyEventHandler(filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
## Instruction:
Handle filepaths in an OS independent manner.
## Code After:
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
if os.path.normpath(event.src_path) == self.filePath:
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
self.filePath = os.path.normpath(filePath)
self.callback = callback
self.eventHandler = MyEventHandler(self.filePath, callback)
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import os
class MyEventHandler(FileSystemEventHandler):
def __init__(self, filePath, callback):
super(MyEventHandler, self).__init__()
self.filePath = filePath
self.callback = callback
def on_modified(self, event):
- if event.src_path == self.filePath:
+ if os.path.normpath(event.src_path) == self.filePath:
? +++++++++++++++++ +
self.callback()
class LibraryFileWatcher(object):
def __init__(self, filePath, callback):
super(LibraryFileWatcher, self).__init__()
- self.filePath = filePath
+ self.filePath = os.path.normpath(filePath)
? +++++++++++++++++ +
self.callback = callback
- self.eventHandler = MyEventHandler(filePath, callback)
+ self.eventHandler = MyEventHandler(self.filePath, callback)
? +++++
self.resume()
def __del__(self):
self.observer.stop()
self.observer.join()
def pause(self):
self.observer.stop()
def resume(self):
self.observer = Observer()
self.watch = self.observer.schedule(self.eventHandler, path=os.path.dirname(self.filePath))
self.observer.start()
|
4ad6f599cdcebc34e9f32a5ab8eaf44a3845ed21
|
pinry/pins/forms.py
|
pinry/pins/forms.py
|
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
raise forms.ValidationError("Requested URL is not an image file. "
"Only images are currently supported.")
try:
Pin.objects.get(url=url)
raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
protocol = url.split(':')[0]
if protocol not in ['http', 'https']:
raise forms.ValidationError("Currently only support HTTP and "
"HTTPS protocols, please be sure "
"you include this in the URL.")
try:
Pin.objects.get(url=url)
raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
elif image:
pass
else:
raise forms.ValidationError("Need either a URL or Upload.")
return cleaned_data
|
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
_errors = {
'not_image': 'Requested URL is not an image file. Only images are currently supported.',
'pinned': 'URL has already been pinned!',
'protocol': 'Currently only support HTTP and HTTPS protocols, please be sure you include this in the URL.',
'nothing': 'Need either a URL or Upload',
}
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
raise forms.ValidationError(self._errors['not_image'])
protocol = url.split(':')[0]
if protocol not in ['http', 'https']:
raise forms.ValidationError(self._errors['protocol'])
try:
Pin.objects.get(url=url)
raise forms.ValidationError(self._errors['pinned'])
except Pin.DoesNotExist:
pass
elif image:
pass
else:
raise forms.ValidationError(self._errors['nothing'])
return cleaned_data
|
Move ValidationError messages to a dictionary that can be accessed from PinForm.clean
|
Move ValidationError messages to a dictionary that can be accessed from PinForm.clean
|
Python
|
bsd-2-clause
|
supervacuo/pinry,Stackato-Apps/pinry,wangjun/pinry,dotcom900825/xishi,QLGu/pinry,pinry/pinry,lapo-luchini/pinry,dotcom900825/xishi,supervacuo/pinry,MSylvia/pinry,Stackato-Apps/pinry,lapo-luchini/pinry,wangjun/pinry,QLGu/pinry,pinry/pinry,MSylvia/pinry,MSylvia/pinry,pinry/pinry,Stackato-Apps/pinry,pinry/pinry,supervacuo/pinry,QLGu/pinry,rafirosenberg/pinry,rafirosenberg/pinry,wangjun/pinry,lapo-luchini/pinry,lapo-luchini/pinry
|
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
+
+ _errors = {
+ 'not_image': 'Requested URL is not an image file. Only images are currently supported.',
+ 'pinned': 'URL has already been pinned!',
+ 'protocol': 'Currently only support HTTP and HTTPS protocols, please be sure you include this in the URL.',
+ 'nothing': 'Need either a URL or Upload',
+ }
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
- raise forms.ValidationError("Requested URL is not an image file. "
- "Only images are currently supported.")
+ raise forms.ValidationError(self._errors['not_image'])
+ protocol = url.split(':')[0]
+ if protocol not in ['http', 'https']:
+ raise forms.ValidationError(self._errors['protocol'])
try:
Pin.objects.get(url=url)
- raise forms.ValidationError("URL has already been pinned!")
+ raise forms.ValidationError(self._errors['pinned'])
- except Pin.DoesNotExist:
- pass
- protocol = url.split(':')[0]
- if protocol not in ['http', 'https']:
- raise forms.ValidationError("Currently only support HTTP and "
- "HTTPS protocols, please be sure "
- "you include this in the URL.")
- try:
- Pin.objects.get(url=url)
- raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
elif image:
pass
else:
- raise forms.ValidationError("Need either a URL or Upload.")
+ raise forms.ValidationError(self._errors['nothing'])
return cleaned_data
|
Move ValidationError messages to a dictionary that can be accessed from PinForm.clean
|
## Code Before:
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
raise forms.ValidationError("Requested URL is not an image file. "
"Only images are currently supported.")
try:
Pin.objects.get(url=url)
raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
protocol = url.split(':')[0]
if protocol not in ['http', 'https']:
raise forms.ValidationError("Currently only support HTTP and "
"HTTPS protocols, please be sure "
"you include this in the URL.")
try:
Pin.objects.get(url=url)
raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
elif image:
pass
else:
raise forms.ValidationError("Need either a URL or Upload.")
return cleaned_data
## Instruction:
Move ValidationError messages to a dictionary that can be accessed from PinForm.clean
## Code After:
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
_errors = {
'not_image': 'Requested URL is not an image file. Only images are currently supported.',
'pinned': 'URL has already been pinned!',
'protocol': 'Currently only support HTTP and HTTPS protocols, please be sure you include this in the URL.',
'nothing': 'Need either a URL or Upload',
}
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
raise forms.ValidationError(self._errors['not_image'])
protocol = url.split(':')[0]
if protocol not in ['http', 'https']:
raise forms.ValidationError(self._errors['protocol'])
try:
Pin.objects.get(url=url)
raise forms.ValidationError(self._errors['pinned'])
except Pin.DoesNotExist:
pass
elif image:
pass
else:
raise forms.ValidationError(self._errors['nothing'])
return cleaned_data
|
from django import forms
from .models import Pin
class PinForm(forms.ModelForm):
url = forms.CharField(required=False)
image = forms.ImageField(label='or Upload', required=False)
+
+ _errors = {
+ 'not_image': 'Requested URL is not an image file. Only images are currently supported.',
+ 'pinned': 'URL has already been pinned!',
+ 'protocol': 'Currently only support HTTP and HTTPS protocols, please be sure you include this in the URL.',
+ 'nothing': 'Need either a URL or Upload',
+ }
class Meta:
model = Pin
fields = ['url', 'image', 'description', 'tags']
def clean(self):
cleaned_data = super(PinForm, self).clean()
url = cleaned_data.get('url')
image = cleaned_data.get('image')
if url:
image_file_types = ['png', 'gif', 'jpeg', 'jpg']
if not url.split('.')[-1].lower() in image_file_types:
- raise forms.ValidationError("Requested URL is not an image file. "
- "Only images are currently supported.")
+ raise forms.ValidationError(self._errors['not_image'])
+ protocol = url.split(':')[0]
+ if protocol not in ['http', 'https']:
+ raise forms.ValidationError(self._errors['protocol'])
try:
Pin.objects.get(url=url)
- raise forms.ValidationError("URL has already been pinned!")
? ------- ^^ ^^^^^^^^^^ ^^
+ raise forms.ValidationError(self._errors['pinned'])
? ^ ++++ ^^^^^^ ^^
- except Pin.DoesNotExist:
- pass
- protocol = url.split(':')[0]
- if protocol not in ['http', 'https']:
- raise forms.ValidationError("Currently only support HTTP and "
- "HTTPS protocols, please be sure "
- "you include this in the URL.")
- try:
- Pin.objects.get(url=url)
- raise forms.ValidationError("URL has already been pinned!")
except Pin.DoesNotExist:
pass
elif image:
pass
else:
- raise forms.ValidationError("Need either a URL or Upload.")
+ raise forms.ValidationError(self._errors['nothing'])
return cleaned_data
|
154632b0ab27d36b63c302a550589a182a319ef8
|
distance_matrix.py
|
distance_matrix.py
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
Change how/where to save the file
|
Change how/where to save the file
|
Python
|
apache-2.0
|
pombo-lab/gamtools,pombo-lab/gamtools
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
- correlation_file = correlation_file[0] + '.correlations.npz'
+ correlation_file[correlation_file.index('chrom')] = "corr"
+ correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
- np.save_compressed(correlation_file, corr=distances)
+ np.savez_compressed(correlation_file, corr=distances)
|
Change how/where to save the file
|
## Code Before:
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file = correlation_file[0] + '.correlations.npz'
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.save_compressed(correlation_file, corr=distances)
## Instruction:
Change how/where to save the file
## Code After:
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
correlation_file[correlation_file.index('chrom')] = "corr"
correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
np.savez_compressed(correlation_file, corr=distances)
|
from GamTools import corr
import numpy as np
import argparse
parser = argparse.ArgumentParser(description='Calculate coverage over different window sizes for a list of bam files.')
parser.add_argument('npz_frequencies_file', help='An npz file containing co-segregation frequencies to convert to correlations')
args = parser.parse_args()
correlation_file = args.npz_frequencies_file.split('.')
- correlation_file = correlation_file[0] + '.correlations.npz'
+ correlation_file[correlation_file.index('chrom')] = "corr"
+ correlation_file = '.'.join(correlation_file)
freqs = np.load(args.npz_frequencies_file)['freqs']
def flatten_freqs(freqs):
freqs_shape = freqs.shape
flat_shape = ( freqs_shape[0] * freqs_shape[1], freqs_shape[2], freqs_shape[3])
return freqs.reshape(flat_shape)
distances = np.array(map(corr, flatten_freqs(freqs))).reshape(freqs.shape[:2])
- np.save_compressed(correlation_file, corr=distances)
+ np.savez_compressed(correlation_file, corr=distances)
? +
|
7e7be00f696bd9fea2e9f18e126d27b6e9e1882d
|
jarn/mkrelease/python.py
|
jarn/mkrelease/python.py
|
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
self.python = sys.executable
self.version_info = sys.version_info
if python is not None:
self.python = python
if version_info is not None:
self.version_info = version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
|
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
self.python = python or sys.executable
self.version_info = version_info or sys.version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
|
Use terser idiom for initialization.
|
Use terser idiom for initialization.
|
Python
|
bsd-2-clause
|
Jarn/jarn.mkrelease
|
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
- self.python = sys.executable
+ self.python = python or sys.executable
- self.version_info = sys.version_info
+ self.version_info = version_info or sys.version_info
- if python is not None:
- self.python = python
- if version_info is not None:
- self.version_info = version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
+
|
Use terser idiom for initialization.
|
## Code Before:
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
self.python = sys.executable
self.version_info = sys.version_info
if python is not None:
self.python = python
if version_info is not None:
self.version_info = version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
## Instruction:
Use terser idiom for initialization.
## Code After:
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
self.python = python or sys.executable
self.version_info = version_info or sys.version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
|
import sys
from exit import err_exit
class Python(object):
"""Python interpreter abstraction."""
def __init__(self, python=None, version_info=None):
- self.python = sys.executable
+ self.python = python or sys.executable
? ++++++++++
- self.version_info = sys.version_info
+ self.version_info = version_info or sys.version_info
? ++++++++++++++++
- if python is not None:
- self.python = python
- if version_info is not None:
- self.version_info = version_info
def __str__(self):
return self.python
def is_valid_python(self):
return (self.version_info[:2] >= (2, 6) and
self.version_info[:2] < (3, 0))
def check_valid_python(self):
if not self.is_valid_python():
err_exit('Python 2.6 or 2.7 required')
+
|
c242ad95221c9c5b2f76795abd7dcbad5145cb2a
|
datagrid_gtk3/tests/utils/test_transformations.py
|
datagrid_gtk3/tests/utils/test_transformations.py
|
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
|
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('1234567'),
'1.234567',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
self.assertEqual(
degree_decimal_str_transform('12345'),
'0.012345',
)
|
Add more test cases to verify transformer behavior
|
Add more test cases to verify transformer behavior
|
Python
|
mit
|
nowsecure/datagrid-gtk3,jcollado/datagrid-gtk3
|
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
+ degree_decimal_str_transform('1234567'),
+ '1.234567',
+ )
+ self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
+ self.assertEqual(
+ degree_decimal_str_transform('12345'),
+ '0.012345',
+ )
|
Add more test cases to verify transformer behavior
|
## Code Before:
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
## Instruction:
Add more test cases to verify transformer behavior
## Code After:
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
degree_decimal_str_transform('1234567'),
'1.234567',
)
self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
self.assertEqual(
degree_decimal_str_transform('12345'),
'0.012345',
)
|
"""Data transformation utilities test cases."""
import unittest
from datagrid_gtk3.utils.transformations import degree_decimal_str_transform
class DegreeDecimalStrTransformTest(unittest.TestCase):
"""Degree decimal string transformation test case."""
def test_no_basestring(self):
"""AssertionError raised when no basestring value is passed."""
self.assertRaises(AssertionError, degree_decimal_str_transform, 0)
self.assertRaises(AssertionError, degree_decimal_str_transform, 1.23)
self.assertRaises(AssertionError, degree_decimal_str_transform, True)
def test_no_digit(self):
"""AssertionError raised when other characters than digits."""
self.assertRaises(AssertionError, degree_decimal_str_transform, '.')
self.assertRaises(AssertionError, degree_decimal_str_transform, '+')
self.assertRaises(AssertionError, degree_decimal_str_transform, '-')
def test_length(self):
"""AssertionError when more characters than expected passed."""
self.assertRaises(
AssertionError, degree_decimal_str_transform, '123456789')
def test_point_insertion(self):
"""Decimal point is inserted in the expected location."""
self.assertEqual(
degree_decimal_str_transform('12345678'),
'12.345678',
)
self.assertEqual(
+ degree_decimal_str_transform('1234567'),
+ '1.234567',
+ )
+ self.assertEqual(
degree_decimal_str_transform('123456'),
'0.123456',
)
+ self.assertEqual(
+ degree_decimal_str_transform('12345'),
+ '0.012345',
+ )
|
24e6d37108bc01b69d2f64014862bebd1e980fee
|
olim/olim/apps/storage/models.py
|
olim/olim/apps/storage/models.py
|
from django.db import models
# Create your models here.
|
from django.db import models
class Filesys(models.Model):
name = models.CharField(max_length=100)
url = models.URLField()
date = models.DateField(auto_now=True)
#uploader = models.ForeignKey('account.User')
thumbnail = models.FileField(upload_to='thumb')
parent_dir = models.CharField(max_length=100)
is_dir = models.BooleanField()
def __str__(self):
if is_dir:
return '/' + name
else:
return name
|
Make a model 'Filesys'. (not yet, because of uploader field)
|
Make a model 'Filesys'. (not yet, because of uploader field)
|
Python
|
apache-2.0
|
sparcs-kaist/olim,sparcs-kaist/olim
|
from django.db import models
- # Create your models here.
+ class Filesys(models.Model):
+ name = models.CharField(max_length=100)
+ url = models.URLField()
+ date = models.DateField(auto_now=True)
+ #uploader = models.ForeignKey('account.User')
+ thumbnail = models.FileField(upload_to='thumb')
+ parent_dir = models.CharField(max_length=100)
+ is_dir = models.BooleanField()
+
+ def __str__(self):
+ if is_dir:
+ return '/' + name
+ else:
+ return name
+
|
Make a model 'Filesys'. (not yet, because of uploader field)
|
## Code Before:
from django.db import models
# Create your models here.
## Instruction:
Make a model 'Filesys'. (not yet, because of uploader field)
## Code After:
from django.db import models
class Filesys(models.Model):
name = models.CharField(max_length=100)
url = models.URLField()
date = models.DateField(auto_now=True)
#uploader = models.ForeignKey('account.User')
thumbnail = models.FileField(upload_to='thumb')
parent_dir = models.CharField(max_length=100)
is_dir = models.BooleanField()
def __str__(self):
if is_dir:
return '/' + name
else:
return name
|
from django.db import models
- # Create your models here.
+ class Filesys(models.Model):
+ name = models.CharField(max_length=100)
+ url = models.URLField()
+ date = models.DateField(auto_now=True)
+ #uploader = models.ForeignKey('account.User')
+ thumbnail = models.FileField(upload_to='thumb')
+
+ parent_dir = models.CharField(max_length=100)
+ is_dir = models.BooleanField()
+
+ def __str__(self):
+ if is_dir:
+ return '/' + name
+ else:
+ return name
|
a8e8c8c33075c4e60467da4e1f8e05e35351b07f
|
url_shortener/default_config.py
|
url_shortener/default_config.py
|
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
|
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
Add configuration values for length of newly generated aliases
|
Add configuration values for length of newly generated aliases
|
Python
|
mit
|
piotr-rusin/url-shortener,piotr-rusin/url-shortener
|
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
+
+ :var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
+ generated alias
+ :var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
+ generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
+ MIN_NEW_ALIAS_LENGTH = 1
+ MAX_NEW_ALIAS_LENGTH = 4
|
Add configuration values for length of newly generated aliases
|
## Code Before:
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
## Instruction:
Add configuration values for length of newly generated aliases
## Code After:
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
:var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
generated alias
:var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
MIN_NEW_ALIAS_LENGTH = 1
MAX_NEW_ALIAS_LENGTH = 4
|
''' Default configuration for the application
This data must be supplemented with custom configuration to which
URL_SHORTENER_CONFIGURATION environment variable points, overriding
some of the values specified here.
:var SQLALCHEMY_DATABASE_URI: uri of database to be used by the application.
The default value servers only as documentation, and it was taken from:
http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
+
+ :var MIN_NEW_ALIAS_LENGTH: a minimum number of characters in a newly
+ generated alias
+ :var MAX_NEW_ALIAS_LENGTH: a maximum number of characters in a newly
+ generated alias
'''
SQLALCHEMY_DATABASE_URI = (
'dialect+driver://username:password@host:port/database'
)
+ MIN_NEW_ALIAS_LENGTH = 1
+ MAX_NEW_ALIAS_LENGTH = 4
|
c8a7b9acc6c66a44eeb9ceac91587bb8ad08ad89
|
pagedown/utils.py
|
pagedown/utils.py
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
Use `django.templatetags.static`to load the file
|
Use `django.templatetags.static`to load the file
Debugging this issue: https://github.com/timmyomahony/django-pagedown/issues/25
|
Python
|
bsd-3-clause
|
timmyomahony/django-pagedown,timmyomahony/django-pagedown,timmyomahony/django-pagedown
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
- from django.contrib.staticfiles.storage import staticfiles_storage
+ from django.templatetags.static import static
- return staticfiles_storage.url(path)
+ return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
+
|
Use `django.templatetags.static`to load the file
|
## Code Before:
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
## Instruction:
Use `django.templatetags.static`to load the file
## Code After:
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
from django.templatetags.static import static
return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
from django.conf import settings
def compatible_staticpath(path):
'''
Try to return a path compatible all the way back to Django 1.2. If anyone
has a cleaner or better way to do this let me know!
'''
try:
# >= 1.4
- from django.contrib.staticfiles.storage import staticfiles_storage
+ from django.templatetags.static import static
- return staticfiles_storage.url(path)
? -----------------
+ return static(path)
except ImportError:
pass
try:
# >= 1.3
return '%s/%s' % (settings.STATIC_URL.rstrip('/'), path)
except AttributeError:
pass
try:
return '%s/%s' % (settings.PAGEDOWN_URL.rstrip('/'), path)
except AttributeError:
pass
return '%s/%s' % (settings.MEDIA_URL.rstrip('/'), path)
|
81de62d46d7daefb2e1eef0d0cc4f5ca5c8aef2f
|
blog/utils.py
|
blog/utils.py
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
|
Use GCBV queryset to get PostGetMixin obj.
|
Ch18: Use GCBV queryset to get PostGetMixin obj.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
+ model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
+ 'not_exist':
+ "No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
- return get_object_or_404(
- Post, **filter_dict)
+ if queryset is None:
+ queryset = self.get_queryset()
+ queryset = queryset.filter(**filter_dict)
+ try:
+ obj = queryset.get()
+ except queryset.model.DoesNotExist:
+ raise Http404(
+ self.errors['not_exist'].format(
+ queryset.model
+ ._meta.verbose_name))
+ return obj
|
Use GCBV queryset to get PostGetMixin obj.
|
## Code Before:
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
return get_object_or_404(
Post, **filter_dict)
## Instruction:
Use GCBV queryset to get PostGetMixin obj.
## Code After:
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
'not_exist':
"No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
if queryset is None:
queryset = self.get_queryset()
queryset = queryset.filter(**filter_dict)
try:
obj = queryset.get()
except queryset.model.DoesNotExist:
raise Http404(
self.errors['not_exist'].format(
queryset.model
._meta.verbose_name))
return obj
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
date_field = 'pub_date'
+ model = Post
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
+ 'not_exist':
+ "No {} by that date and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
date_field = self.date_field
slug_field = self.get_slug_field()
filter_dict = {
date_field + '__year': year,
date_field + '__month': month,
slug_field: slug,
}
- return get_object_or_404(
- Post, **filter_dict)
+ if queryset is None:
+ queryset = self.get_queryset()
+ queryset = queryset.filter(**filter_dict)
+ try:
+ obj = queryset.get()
+ except queryset.model.DoesNotExist:
+ raise Http404(
+ self.errors['not_exist'].format(
+ queryset.model
+ ._meta.verbose_name))
+ return obj
|
5d09fef9ee1f6b8627e372695a93be3236820f46
|
app/main/errors.py
|
app/main/errors.py
|
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
# TODO: log the error
headers = []
error = e.description
if e.code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
elif e.code == 500:
error = "Internal error"
return jsonify(error=error), e.code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
|
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
headers = []
code = getattr(e, 'code', 500)
error = getattr(e, 'description', 'Internal error')
if code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
elif code == 500:
error = "Internal error"
return jsonify(error=error), code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
|
Fix app error handler raising an attribute error
|
Fix app error handler raising an attribute error
We're using a single error handler to return a JSON response for
any error code.
The handler expects a flask HTTP error exception with `.code` and
`.description` attributes (like the ones raised by `abort`).
However, if the app raises an exception that's not handled by the
application code the error handler is called with the original
exception object instead. Depending on the exception, that object
might not contain code or description attributes.
In this case, an AttributeError in the error handler itself would
kill the WSGI worker and the application would fail to respond to
the request (leading to a 502 from the nginx proxy).
Replacing attribute access with `getattr` allows us to set the default
values to a 500 response with 'Internal error' for all non-HTTP
exceptions. We still get the error details in the logs, but we don't
want to display any additional information in the HTTP response.
Note: error handlers for HTTP 500 code are not triggered by Flask in
DEBUG mode, so this code usually doesn't run locally.
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
- # TODO: log the error
headers = []
- error = e.description
+ code = getattr(e, 'code', 500)
+ error = getattr(e, 'description', 'Internal error')
+
- if e.code == 401:
+ if code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
- elif e.code == 500:
+ elif code == 500:
error = "Internal error"
- return jsonify(error=error), e.code, headers
+ return jsonify(error=error), code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
|
Fix app error handler raising an attribute error
|
## Code Before:
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
# TODO: log the error
headers = []
error = e.description
if e.code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
elif e.code == 500:
error = "Internal error"
return jsonify(error=error), e.code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
## Instruction:
Fix app error handler raising an attribute error
## Code After:
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
headers = []
code = getattr(e, 'code', 500)
error = getattr(e, 'description', 'Internal error')
if code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
elif code == 500:
error = "Internal error"
return jsonify(error=error), code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
|
from flask import jsonify
from . import main
from ..models import ValidationError
@main.app_errorhandler(ValidationError)
def validatation_error(e):
return jsonify(error=e.message), 400
def generic_error_handler(e):
- # TODO: log the error
headers = []
- error = e.description
+ code = getattr(e, 'code', 500)
+ error = getattr(e, 'description', 'Internal error')
+
- if e.code == 401:
? --
+ if code == 401:
headers = [('WWW-Authenticate', 'Bearer')]
- elif e.code == 500:
? --
+ elif code == 500:
error = "Internal error"
- return jsonify(error=error), e.code, headers
? ---
+ return jsonify(error=error), code, headers
for code in range(400, 599):
main.app_errorhandler(code)(generic_error_handler)
|
5fe7e1e1cdccd8b54d6db2a64509923d8596a5f4
|
test_connector/__manifest__.py
|
test_connector/__manifest__.py
|
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': ['connector',
],
'website': 'http://www.camptocamp.com',
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
|
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
'description': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Hidden',
'depends': ['connector',
],
'website': 'https://www.camptocamp.com',
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
|
Add description in test addons to make pylint happier
|
Add description in test addons to make pylint happier
|
Python
|
agpl-3.0
|
OCA/connector,OCA/connector
|
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
+ 'description': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
- 'category': 'Generic Modules',
+ 'category': 'Hidden',
'depends': ['connector',
],
- 'website': 'http://www.camptocamp.com',
+ 'website': 'https://www.camptocamp.com',
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
|
Add description in test addons to make pylint happier
|
## Code Before:
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules',
'depends': ['connector',
],
'website': 'http://www.camptocamp.com',
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
## Instruction:
Add description in test addons to make pylint happier
## Code After:
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
'description': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Hidden',
'depends': ['connector',
],
'website': 'https://www.camptocamp.com',
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
|
{'name': 'Connector Tests',
'summary': 'Automated tests for Connector, do not install.',
+ 'description': 'Automated tests for Connector, do not install.',
'version': '10.0.1.0.0',
'author': 'Camptocamp,Odoo Community Association (OCA)',
'license': 'AGPL-3',
- 'category': 'Generic Modules',
+ 'category': 'Hidden',
'depends': ['connector',
],
- 'website': 'http://www.camptocamp.com',
+ 'website': 'https://www.camptocamp.com',
? +
'data': ['security/ir.model.access.csv',
],
'installable': True,
}
|
13ba6bf5c12c46aa43c0060d40458fe453df9c33
|
ydf/yaml_ext.py
|
ydf/yaml_ext.py
|
import collections
from ruamel import yaml
from ruamel.yaml import resolver
class OrderedLoader(yaml.Loader):
"""
Extends the default YAML loader to use :class:`~collections.OrderedDict` for mapping
types.
"""
def __init__(self, *args, **kwargs):
super(OrderedLoader, self).__init__(*args, **kwargs)
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
def load(stream):
"""
Load the given YAML string.
"""
return yaml.load(stream, OrderedLoader)
|
import collections
from ruamel import yaml
from ruamel.yaml import resolver
class OrderedRoundTripLoader(yaml.RoundTripLoader):
"""
Extends the default round trip YAML loader to use :class:`~collections.OrderedDict` for mapping
types.
"""
def __init__(self, *args, **kwargs):
super(OrderedRoundTripLoader, self).__init__(*args, **kwargs)
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
def load_all(stream):
"""
Load all documents within the given YAML string.
:param stream: A valid YAML stream.
:return: Generator that yields each document found in the YAML stream.
"""
return yaml.load_all(stream, OrderedRoundTripLoader)
|
Switch to round trip loader to support multiple documents.
|
Switch to round trip loader to support multiple documents.
|
Python
|
apache-2.0
|
ahawker/ydf
|
import collections
from ruamel import yaml
from ruamel.yaml import resolver
- class OrderedLoader(yaml.Loader):
+ class OrderedRoundTripLoader(yaml.RoundTripLoader):
"""
- Extends the default YAML loader to use :class:`~collections.OrderedDict` for mapping
+ Extends the default round trip YAML loader to use :class:`~collections.OrderedDict` for mapping
types.
"""
def __init__(self, *args, **kwargs):
- super(OrderedLoader, self).__init__(*args, **kwargs)
+ super(OrderedRoundTripLoader, self).__init__(*args, **kwargs)
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
- def load(stream):
+ def load_all(stream):
"""
- Load the given YAML string.
+ Load all documents within the given YAML string.
+
+ :param stream: A valid YAML stream.
+ :return: Generator that yields each document found in the YAML stream.
"""
- return yaml.load(stream, OrderedLoader)
+ return yaml.load_all(stream, OrderedRoundTripLoader)
|
Switch to round trip loader to support multiple documents.
|
## Code Before:
import collections
from ruamel import yaml
from ruamel.yaml import resolver
class OrderedLoader(yaml.Loader):
"""
Extends the default YAML loader to use :class:`~collections.OrderedDict` for mapping
types.
"""
def __init__(self, *args, **kwargs):
super(OrderedLoader, self).__init__(*args, **kwargs)
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
def load(stream):
"""
Load the given YAML string.
"""
return yaml.load(stream, OrderedLoader)
## Instruction:
Switch to round trip loader to support multiple documents.
## Code After:
import collections
from ruamel import yaml
from ruamel.yaml import resolver
class OrderedRoundTripLoader(yaml.RoundTripLoader):
"""
Extends the default round trip YAML loader to use :class:`~collections.OrderedDict` for mapping
types.
"""
def __init__(self, *args, **kwargs):
super(OrderedRoundTripLoader, self).__init__(*args, **kwargs)
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
def load_all(stream):
"""
Load all documents within the given YAML string.
:param stream: A valid YAML stream.
:return: Generator that yields each document found in the YAML stream.
"""
return yaml.load_all(stream, OrderedRoundTripLoader)
|
import collections
from ruamel import yaml
from ruamel.yaml import resolver
- class OrderedLoader(yaml.Loader):
+ class OrderedRoundTripLoader(yaml.RoundTripLoader):
? +++++++++ +++++++++
"""
- Extends the default YAML loader to use :class:`~collections.OrderedDict` for mapping
+ Extends the default round trip YAML loader to use :class:`~collections.OrderedDict` for mapping
? +++++++++++
types.
"""
def __init__(self, *args, **kwargs):
- super(OrderedLoader, self).__init__(*args, **kwargs)
+ super(OrderedRoundTripLoader, self).__init__(*args, **kwargs)
? +++++++++
self.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, self.construct_ordered_mapping)
@staticmethod
def construct_ordered_mapping(loader, node):
loader.flatten_mapping(node)
return collections.OrderedDict(loader.construct_pairs(node))
- def load(stream):
+ def load_all(stream):
? ++++
"""
- Load the given YAML string.
+ Load all documents within the given YAML string.
+
+ :param stream: A valid YAML stream.
+ :return: Generator that yields each document found in the YAML stream.
"""
- return yaml.load(stream, OrderedLoader)
+ return yaml.load_all(stream, OrderedRoundTripLoader)
? ++++ +++++++++
|
2d688f97b9869fdfed9237b91fdce287278e3c6c
|
wsgi.py
|
wsgi.py
|
import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
Send data to elasticsearch asynchronously.
|
Send data to elasticsearch asynchronously.
|
Python
|
mit
|
socialwifi/elasticsearch-raven,pozytywnie/elasticsearch-raven,serathius/elasticsearch-raven
|
import os
+ from queue import Queue
+ from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
+ blocking_queue = Queue()
+
+
+ def send():
+ while True:
+ data, index = blocking_queue.get()
+ transport.send(data, index)
+ blocking_queue.task_done()
+
+
+ sender = Thread(target=send)
+ sender.daemon = True
+ sender.start()
def application(environ, start_response):
index = get_index(environ)
- transport.send(environ['wsgi.input'].read(), index)
+ length = int(environ.get('CONTENT_LENGTH', '0'))
+ data = environ['wsgi.input'].read(length)
+ blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
Send data to elasticsearch asynchronously.
|
## Code Before:
import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
## Instruction:
Send data to elasticsearch asynchronously.
## Code After:
import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
import os
+ from queue import Queue
+ from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
+ blocking_queue = Queue()
+
+
+ def send():
+ while True:
+ data, index = blocking_queue.get()
+ transport.send(data, index)
+ blocking_queue.task_done()
+
+
+ sender = Thread(target=send)
+ sender.daemon = True
+ sender.start()
def application(environ, start_response):
index = get_index(environ)
- transport.send(environ['wsgi.input'].read(), index)
+ length = int(environ.get('CONTENT_LENGTH', '0'))
+ data = environ['wsgi.input'].read(length)
+ blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
011ad6090e183ce359c0a74bbd2f2530e1d5178c
|
tests/test_repr.py
|
tests/test_repr.py
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT:
pass
else:
assert False, 'TIMEOUT exception expected. No exception aised.'
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT as e:
str(e) # Smoketest
else:
assert False, 'TIMEOUT exception expected. No exception raised.'
|
Check error repr can be str-ed
|
Check error repr can be str-ed
|
Python
|
isc
|
nodish/pexpect,nodish/pexpect,nodish/pexpect
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
- except pexpect.TIMEOUT:
+ except pexpect.TIMEOUT as e:
- pass
+ str(e) # Smoketest
else:
- assert False, 'TIMEOUT exception expected. No exception aised.'
+ assert False, 'TIMEOUT exception expected. No exception raised.'
|
Check error repr can be str-ed
|
## Code Before:
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT:
pass
else:
assert False, 'TIMEOUT exception expected. No exception aised.'
## Instruction:
Check error repr can be str-ed
## Code After:
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
    """Tests for the __str__ implementations of the spawn classes."""

    def test_str_spawnu(self):
        """A unicode-mode spawn must render to a plain string."""
        proc = pexpect.spawnu('cat')
        rendered = str(proc)
        assert isinstance(rendered, str)

    def test_str_spawn(self):
        """A byte-mode spawn must render to a plain string."""
        proc = pexpect.spawn('cat')
        rendered = str(proc)
        assert isinstance(rendered, str)

    def test_str_before_spawn(self):
        """A TIMEOUT raised before any command was spawned must still be str-able."""
        child = pexpect.spawn(None, None)
        # Force expect() to see no data at all so it times out immediately.
        child.read_nonblocking = lambda size, timeout: b''
        try:
            child.expect('alpha', timeout=0.1)
        except pexpect.TIMEOUT as exc:
            str(exc)  # Smoketest: the error's repr must not blow up.
        else:
            assert False, 'TIMEOUT exception expected. No exception raised.'
|
""" Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
- except pexpect.TIMEOUT:
+ except pexpect.TIMEOUT as e:
? +++++
- pass
+ str(e) # Smoketest
else:
- assert False, 'TIMEOUT exception expected. No exception aised.'
+ assert False, 'TIMEOUT exception expected. No exception raised.'
? +
|
7b1a0022b41dbf17de352e4686458e5250b28e49
|
quantityfield/widgets.py
|
quantityfield/widgets.py
|
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
def get_choices(self, allowed_types=None):
allowed_types = allowed_types or dir(ureg)
return [(x, x) for x in allowed_types]
def __init__(self, attrs=None, base_units=None, allowed_types=None):
choices = self.get_choices(allowed_types)
self.base_units = base_units
attrs = attrs or {}
attrs.setdefault('step', 'any')
widgets = (
NumberInput(attrs=attrs),
Select(attrs=attrs, choices=choices)
)
super(QuantityWidget, self).__init__(widgets, attrs)
def decompress(self, value):
non_decimal = re.compile(r'[^\d.]+')
if value:
number_value = non_decimal.sub('', str(value))
return [number_value, self.base_units]
return [None, self.base_units]
|
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
def get_choices(self, allowed_types=None):
allowed_types = allowed_types or dir(ureg)
return [(x, x) for x in allowed_types]
def __init__(self, attrs=None, base_units=None, allowed_types=None):
choices = self.get_choices(allowed_types)
self.base_units = base_units
attrs = attrs or {}
attrs.setdefault('step', 'any')
widgets = (
NumberInput(attrs=attrs),
Select(attrs=attrs, choices=choices)
)
super(QuantityWidget, self).__init__(widgets, attrs)
def decompress(self, value):
non_decimal = re.compile(r'[^\d.]+')
if value:
number_value = non_decimal.sub('', str(value))
return [number_value, self.base_units]
return [None, self.base_units]
|
Fix indentation error from conversion to spaces
|
Fix indentation error from conversion to spaces
|
Python
|
mit
|
bharling/django-pint,bharling/django-pint
|
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
def get_choices(self, allowed_types=None):
allowed_types = allowed_types or dir(ureg)
return [(x, x) for x in allowed_types]
def __init__(self, attrs=None, base_units=None, allowed_types=None):
choices = self.get_choices(allowed_types)
self.base_units = base_units
attrs = attrs or {}
attrs.setdefault('step', 'any')
widgets = (
NumberInput(attrs=attrs),
Select(attrs=attrs, choices=choices)
)
super(QuantityWidget, self).__init__(widgets, attrs)
- def decompress(self, value):
+ def decompress(self, value):
- non_decimal = re.compile(r'[^\d.]+')
+ non_decimal = re.compile(r'[^\d.]+')
- if value:
+ if value:
- number_value = non_decimal.sub('', str(value))
+ number_value = non_decimal.sub('', str(value))
- return [number_value, self.base_units]
+ return [number_value, self.base_units]
- return [None, self.base_units]
+ return [None, self.base_units]
|
Fix indentation error from conversion to spaces
|
## Code Before:
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
def get_choices(self, allowed_types=None):
allowed_types = allowed_types or dir(ureg)
return [(x, x) for x in allowed_types]
def __init__(self, attrs=None, base_units=None, allowed_types=None):
choices = self.get_choices(allowed_types)
self.base_units = base_units
attrs = attrs or {}
attrs.setdefault('step', 'any')
widgets = (
NumberInput(attrs=attrs),
Select(attrs=attrs, choices=choices)
)
super(QuantityWidget, self).__init__(widgets, attrs)
def decompress(self, value):
non_decimal = re.compile(r'[^\d.]+')
if value:
number_value = non_decimal.sub('', str(value))
return [number_value, self.base_units]
return [None, self.base_units]
## Instruction:
Fix indentation error from conversion to spaces
## Code After:
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
    """Two-part widget: a numeric input paired with a unit selector."""

    def get_choices(self, allowed_types=None):
        """Build (value, label) pairs for the unit dropdown.

        Defaults to every unit name the pint registry exposes.
        """
        options = allowed_types if allowed_types else dir(ureg)
        return [(name, name) for name in options]

    def __init__(self, attrs=None, base_units=None, allowed_types=None):
        self.base_units = base_units
        if not attrs:
            attrs = {}
        # 'step: any' lets the number input accept arbitrary decimals.
        attrs.setdefault('step', 'any')
        unit_choices = self.get_choices(allowed_types)
        sub_widgets = (
            NumberInput(attrs=attrs),
            Select(attrs=attrs, choices=unit_choices),
        )
        super(QuantityWidget, self).__init__(sub_widgets, attrs)

    def decompress(self, value):
        """Split a quantity into [magnitude, units] for the two sub-widgets."""
        if not value:
            return [None, self.base_units]
        # Strip everything that is not a digit or a decimal point.
        magnitude = re.sub(r'[^\d.]+', '', str(value))
        return [magnitude, self.base_units]
|
import re
from django.forms.widgets import MultiWidget, Select, NumberInput
from . import ureg
class QuantityWidget(MultiWidget):
def get_choices(self, allowed_types=None):
allowed_types = allowed_types or dir(ureg)
return [(x, x) for x in allowed_types]
def __init__(self, attrs=None, base_units=None, allowed_types=None):
choices = self.get_choices(allowed_types)
self.base_units = base_units
attrs = attrs or {}
attrs.setdefault('step', 'any')
widgets = (
NumberInput(attrs=attrs),
Select(attrs=attrs, choices=choices)
)
super(QuantityWidget, self).__init__(widgets, attrs)
- def decompress(self, value):
? ----
+ def decompress(self, value):
- non_decimal = re.compile(r'[^\d.]+')
? ----
+ non_decimal = re.compile(r'[^\d.]+')
- if value:
? ----
+ if value:
- number_value = non_decimal.sub('', str(value))
? ----
+ number_value = non_decimal.sub('', str(value))
- return [number_value, self.base_units]
? ----
+ return [number_value, self.base_units]
- return [None, self.base_units]
? ----
+ return [None, self.base_units]
|
c0ee0f27b21ed7c6eb97ad6b1fc1c7d72127c772
|
audio_pipeline/tb_ui/util/Resources.py
|
audio_pipeline/tb_ui/util/Resources.py
|
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
def is_release(directory):
d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
for f in os.scandir(directory):
if f.is_file:
file_name = f.name
try:
track = AudioFileFactory.get(f.path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
|
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
def is_release(directory):
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
for f in os.listdir(directory):
file_path = os.path.join(directory, f)
if os.path.isfile(file_path):
try:
track = AudioFileFactory.get(file_path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
|
Remove os.scandir usage (not in python 3.4)
|
Remove os.scandir usage (not in python 3.4)
|
Python
|
mit
|
hidat/audio_pipeline
|
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
+
def is_release(directory):
- d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
- for f in os.scandir(directory):
+ for f in os.listdir(directory):
- if f.is_file:
- file_name = f.name
+ file_path = os.path.join(directory, f)
+ if os.path.isfile(file_path):
try:
- track = AudioFileFactory.get(f.path)
+ track = AudioFileFactory.get(file_path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
|
Remove os.scandir usage (not in python 3.4)
|
## Code Before:
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
def is_release(directory):
d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
for f in os.scandir(directory):
if f.is_file:
file_name = f.name
try:
track = AudioFileFactory.get(f.path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
## Instruction:
Remove os.scandir usage (not in python 3.4)
## Code After:
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
    """
    Check whether or not the given track has a valid MBID.

    Args:
        track: audio file object whose ``mbid.value`` attribute holds the
            MusicBrainz identifier (possibly empty/None).

    Returns:
        True only when ``track.mbid.value`` parses as a valid UUID,
        False otherwise.
    """
    if not track.mbid.value:
        return False
    try:
        # A MusicBrainz ID is a UUID; parsing validates the format.
        uuid.UUID(track.mbid.value)
    except ValueError:
        return False
    return True
def is_release(directory):
    """Return the first parseable audio file found in *directory*, or False.

    Entries that are not regular files, or that fail to parse, are skipped.
    """
    track = False
    # we'll set this to a DBPOWERAMP config later
    #if InputPatterns.release_pattern.match(d):
    for entry in os.listdir(directory):
        path = os.path.join(directory, entry)
        if not os.path.isfile(path):
            continue
        try:
            track = AudioFileFactory.get(path)
        except (IOError, Exceptions.UnsupportedFiletypeError):
            # Unreadable or unsupported file: keep scanning the directory.
            track = False
            continue
        # First successfully-parsed file decides the result.
        break
    return track
|
import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
+
def is_release(directory):
- d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
- for f in os.scandir(directory):
? ^^^
+ for f in os.listdir(directory):
? ++ ^
- if f.is_file:
- file_name = f.name
+ file_path = os.path.join(directory, f)
+ if os.path.isfile(file_path):
try:
- track = AudioFileFactory.get(f.path)
? ^
+ track = AudioFileFactory.get(file_path)
? ^^^^
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
|
5dc5de9dab24cf698dc26db24d1e1697472c2e05
|
tests/integration/pillar/test_pillar_include.py
|
tests/integration/pillar/test_pillar_include.py
|
from __future__ import unicode_literals
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
def test_pillar_include(self):
'''
Test pillar include
'''
ret = self.minion_run('pillar.items')
assert 'a' in ret['element']
assert ret['element']['a'] == {'a': ['Entry A']}
assert 'b' in ret['element']
assert ret['element']['b'] == {'b': ['Entry B']}
def test_pillar_glob_include(self):
'''
Test pillar include via glob pattern
'''
ret = self.minion_run('pillar.items')
assert 'glob-a' in ret
assert ret['glob-a'] == 'Entry A'
assert 'glob-b' in ret
assert ret['glob-b'] == 'Entry B'
|
'''
Pillar include tests
'''
from __future__ import unicode_literals
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
def test_pillar_include(self):
'''
Test pillar include
'''
ret = self.minion_run('pillar.items')
assert 'a' in ret['element']
assert ret['element']['a'] == {'a': ['Entry A']}
assert 'b' in ret['element']
assert ret['element']['b'] == {'b': ['Entry B']}
def test_pillar_glob_include(self):
'''
Test pillar include via glob pattern
'''
ret = self.minion_run('pillar.items')
assert 'glob-a' in ret
assert ret['glob-a'] == 'Entry A'
assert 'glob-b' in ret
assert ret['glob-b'] == 'Entry B'
|
Use file encoding and add docstring
|
Use file encoding and add docstring
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
+ '''
+ Pillar include tests
+ '''
from __future__ import unicode_literals
+
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
def test_pillar_include(self):
'''
Test pillar include
'''
ret = self.minion_run('pillar.items')
assert 'a' in ret['element']
assert ret['element']['a'] == {'a': ['Entry A']}
assert 'b' in ret['element']
assert ret['element']['b'] == {'b': ['Entry B']}
def test_pillar_glob_include(self):
'''
Test pillar include via glob pattern
'''
ret = self.minion_run('pillar.items')
assert 'glob-a' in ret
assert ret['glob-a'] == 'Entry A'
assert 'glob-b' in ret
assert ret['glob-b'] == 'Entry B'
|
Use file encoding and add docstring
|
## Code Before:
from __future__ import unicode_literals
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
def test_pillar_include(self):
'''
Test pillar include
'''
ret = self.minion_run('pillar.items')
assert 'a' in ret['element']
assert ret['element']['a'] == {'a': ['Entry A']}
assert 'b' in ret['element']
assert ret['element']['b'] == {'b': ['Entry B']}
def test_pillar_glob_include(self):
'''
Test pillar include via glob pattern
'''
ret = self.minion_run('pillar.items')
assert 'glob-a' in ret
assert ret['glob-a'] == 'Entry A'
assert 'glob-b' in ret
assert ret['glob-b'] == 'Entry B'
## Instruction:
Use file encoding and add docstring
## Code After:
'''
Pillar include tests
'''
from __future__ import unicode_literals
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
    """Integration tests for pillar ``include`` handling."""

    def test_pillar_include(self):
        '''
        Test pillar include
        '''
        pillar = self.minion_run('pillar.items')
        element = pillar['element']
        assert 'a' in element
        assert element['a'] == {'a': ['Entry A']}
        assert 'b' in element
        assert element['b'] == {'b': ['Entry B']}

    def test_pillar_glob_include(self):
        '''
        Test pillar include via glob pattern
        '''
        pillar = self.minion_run('pillar.items')
        assert 'glob-a' in pillar
        assert pillar['glob-a'] == 'Entry A'
        assert 'glob-b' in pillar
        assert pillar['glob-b'] == 'Entry B'
|
+ '''
+ Pillar include tests
+ '''
from __future__ import unicode_literals
+
from tests.support.case import ModuleCase
class PillarIncludeTest(ModuleCase):
def test_pillar_include(self):
'''
Test pillar include
'''
ret = self.minion_run('pillar.items')
assert 'a' in ret['element']
assert ret['element']['a'] == {'a': ['Entry A']}
assert 'b' in ret['element']
assert ret['element']['b'] == {'b': ['Entry B']}
def test_pillar_glob_include(self):
'''
Test pillar include via glob pattern
'''
ret = self.minion_run('pillar.items')
assert 'glob-a' in ret
assert ret['glob-a'] == 'Entry A'
assert 'glob-b' in ret
assert ret['glob-b'] == 'Entry B'
|
e8548c26df021d9eff7c056338e3442beeed9397
|
cactusbot/handlers/spam.py
|
cactusbot/handlers/spam.py
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
exceeds_caps = self.check_caps(''.join(
chunk["text"] for chunk in packet if chunk["type"] == chunk["text"]
))
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
Move for loop to generator comprehension
|
Move for loop to generator comprehension
|
Python
|
mit
|
CactusDev/CactusBot
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
+
- built_message = ""
- for chunk in packet:
- if chunk["type"] == "text":
- built_message += chunk["text"]
- exceeds_caps = self.check_caps(built_message)
+ exceeds_caps = self.check_caps(''.join(
+ chunk["text"] for chunk in packet if chunk["type"] == chunk["text"]
+ ))
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
Move for loop to generator comprehension
|
## Code Before:
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
## Instruction:
Move for loop to generator comprehension
## Code After:
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
    """Spam handler: flags messages with excessive caps, emotes, or links."""

    MAX_SCORE = 16        # caps score above which a message is spam
    MAX_EMOTES = 6        # emote chunks above which a message is spam
    ALLOW_LINKS = False   # whether links are permitted at all

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def on_message(self, packet):
        """Handle message events.

        Returns True when the message should be treated as spam (too many
        capitals, too many emotes, or a disallowed link), False otherwise.
        """
        # BUG FIX: the text chunks were selected with
        # `chunk["type"] == chunk["text"]`, which compares the type tag to the
        # chunk's text content and almost never matches, so the caps check
        # effectively ran on an empty string. Compare against the literal
        # "text" type instead.
        exceeds_caps = self.check_caps(''.join(
            chunk["text"] for chunk in packet if chunk["type"] == "text"
        ))
        contains_emotes = self.check_emotes(packet)
        has_links = self.check_links(packet)
        if exceeds_caps or contains_emotes or has_links:
            return True
        else:
            return False

    def check_links(self, packet):
        """Check for links in the message."""
        return not self.ALLOW_LINKS and any(
            chunk["type"] == "link" for chunk in packet)

    def check_emotes(self, packet):
        """Check for emotes in the message."""
        return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES

    def check_caps(self, message):
        """Score capitalisation: each upper-case char counts +1, each lower-case -1."""
        return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
"""Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
+
- built_message = ""
- for chunk in packet:
- if chunk["type"] == "text":
- built_message += chunk["text"]
- exceeds_caps = self.check_caps(built_message)
? ^^ ^^^^^^^^^^^
+ exceeds_caps = self.check_caps(''.join(
? ^^^^^ ^^
+ chunk["text"] for chunk in packet if chunk["type"] == chunk["text"]
+ ))
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
"""Check for links in the message."""
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
"""Check for emotes in the message."""
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
"""Check for caps in the message."""
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
|
40bfd177cea186bc975fdc51ab61cf4d9e7026a3
|
tests/testapp/manage.py
|
tests/testapp/manage.py
|
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
try:
import dynamic_admin #@UnusedImport
except ImportError:
import sys, os
sys.path.append('%s/../..' % os.getcwd())
if __name__ == "__main__":
execute_manager(settings)
|
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
try:
import dynamic_admin #@UnusedImport
except ImportError:
import sys, os
sys.path.append(os.path.abspath('%s/../..' % os.getcwd()))
if __name__ == "__main__":
execute_manager(settings)
|
Make sure to use abspath when adding dynamic_choices to sys.path
|
Make sure to use abspath when adding dynamic_choices to sys.path
|
Python
|
mit
|
charettes/django-dynamic-choices,charettes/django-dynamic-choices,charettes/django-dynamic-choices
|
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
try:
import dynamic_admin #@UnusedImport
except ImportError:
import sys, os
- sys.path.append('%s/../..' % os.getcwd())
+ sys.path.append(os.path.abspath('%s/../..' % os.getcwd()))
if __name__ == "__main__":
execute_manager(settings)
|
Make sure to use abspath when adding dynamic_choices to sys.path
|
## Code Before:
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
try:
import dynamic_admin #@UnusedImport
except ImportError:
import sys, os
sys.path.append('%s/../..' % os.getcwd())
if __name__ == "__main__":
execute_manager(settings)
## Instruction:
Make sure to use abspath when adding dynamic_choices to sys.path
## Code After:
# Django management entry point for the test application (legacy
# execute_manager-style manage.py).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
try:
    # Make the package under test importable: if dynamic_admin is not already
    # on sys.path, add the repository root (two levels above the current
    # working directory, resolved to an absolute path).
    import dynamic_admin #@UnusedImport
except ImportError:
    import sys, os
    sys.path.append(os.path.abspath('%s/../..' % os.getcwd()))
if __name__ == "__main__":
    execute_manager(settings)
|
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
try:
import dynamic_admin #@UnusedImport
except ImportError:
import sys, os
- sys.path.append('%s/../..' % os.getcwd())
+ sys.path.append(os.path.abspath('%s/../..' % os.getcwd()))
? ++++++++++++++++ +
if __name__ == "__main__":
execute_manager(settings)
|
3234d929d22d7504d89753ce6351d0efe1bfa8ac
|
whitepy/lexer.py
|
whitepy/lexer.py
|
from .lexerconstants import *
from .ws_token import Tokeniser
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
# TODO: Add error handling for invalid integer
pass
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
|
from .lexerconstants import *
from .ws_token import Tokeniser
class IntError(ValueError):
'''Exception when invalid integer is found'''
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
raise IntError
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
|
Add Execption for invalid Integer
|
Add Execption for invalid Integer
Exception class created for invalid integer and raise it if a bad integer is
found
|
Python
|
apache-2.0
|
yasn77/whitepy
|
from .lexerconstants import *
from .ws_token import Tokeniser
+
+
+ class IntError(ValueError):
+ '''Exception when invalid integer is found'''
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
+ raise IntError
- # TODO: Add error handling for invalid integer
- pass
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
|
Add Execption for invalid Integer
|
## Code Before:
from .lexerconstants import *
from .ws_token import Tokeniser
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
# TODO: Add error handling for invalid integer
pass
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
## Instruction:
Add Execption for invalid Integer
## Code After:
from .lexerconstants import *
from .ws_token import Tokeniser
class IntError(ValueError):
'''Exception when invalid integer is found'''
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
raise IntError
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
|
from .lexerconstants import *
from .ws_token import Tokeniser
+
+
+ class IntError(ValueError):
+ '''Exception when invalid integer is found'''
class Lexer(object):
def __init__(self, line):
self.line = line
self.pos = 0
self.tokens = []
def _get_int(self):
token = Tokeniser()
if self.line[-1] == '\n':
const = 'INT'
token.scan(self.line, self.pos, const)
else:
+ raise IntError
- # TODO: Add error handling for invalid integer
- pass
return token
def _get_token(self, const):
token = Tokeniser()
token.scan(self.line, self.pos, const)
return token
def get_all_tokens(self):
while self.pos < len(self.line):
const = IMP_CONST if self.pos is 0 else eval(
"{}_CONST".format(self.tokens[0].type))
token = self._get_token(const)
self.tokens.append(token)
self.pos = self.pos + len(token.value)
if token.type == 'PUSH':
self.tokens.append(self._get_int())
self.pos = len(self.line)
|
ca3366bfdec91797c0a5406a5ba8094c4d13a233
|
comics/feedback/forms.py
|
comics/feedback/forms.py
|
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
help_text='Remember to sign with you mail address if you want a reply.',
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
|
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
help_text='Sign with your email address if you want a reply.',
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
|
Fix typo in feedback form help text
|
Fix typo in feedback form help text
|
Python
|
agpl-3.0
|
datagutten/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,jodal/comics,datagutten/comics,datagutten/comics
|
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
- help_text='Remember to sign with you mail address if you want a reply.',
+ help_text='Sign with your email address if you want a reply.',
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
|
Fix typo in feedback form help text
|
## Code Before:
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
help_text='Remember to sign with you mail address if you want a reply.',
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
## Instruction:
Fix typo in feedback form help text
## Code After:
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
help_text='Sign with your email address if you want a reply.',
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
|
from bootstrap.forms import BootstrapForm
from django import forms
class FeedbackForm(BootstrapForm):
message = forms.CharField(label="What's on your heart",
- help_text='Remember to sign with you mail address if you want a reply.',
? ^^^^^^^^^^^^^
+ help_text='Sign with your email address if you want a reply.',
? ^ + +
widget=forms.Textarea(attrs={'rows': 5, 'cols': 100}))
|
fde67686d2bd685e31cfc0e156314476b057db78
|
xudd/tests/test_demos.py
|
xudd/tests/test_demos.py
|
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
|
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
|
Add inter-hive communication lotsamessages test
|
Add inter-hive communication lotsamessages test
|
Python
|
apache-2.0
|
xudd/xudd
|
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
+
+ def test_lotsamessages_ihc():
+ """
+ Test the lotsamessages demo with inter-hive communication
+ """
+ assert lotsamessages.main(
+ num_experiments=20, num_steps=20, subprocesses=4) is True
+
+
### def test_ihc_lotsamessages():
|
Add inter-hive communication lotsamessages test
|
## Code Before:
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
### def test_ihc_lotsamessages():
## Instruction:
Add inter-hive communication lotsamessages test
## Code After:
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
def test_lotsamessages_ihc():
"""
Test the lotsamessages demo with inter-hive communication
"""
assert lotsamessages.main(
num_experiments=20, num_steps=20, subprocesses=4) is True
### def test_ihc_lotsamessages():
|
from xudd.demos import special_hive
from xudd.demos import lotsamessages
def test_special_hive():
"""
This demo tests that demos are actually actors and are in fact subclassable.
"""
special_hive.main()
def test_lotsamessages():
"""
Test the lotsamessages demo (but not with too many messages ;))
"""
assert lotsamessages.main(num_experiments=20, num_steps=20) is True
+
+ def test_lotsamessages_ihc():
+ """
+ Test the lotsamessages demo with inter-hive communication
+ """
+ assert lotsamessages.main(
+ num_experiments=20, num_steps=20, subprocesses=4) is True
+
+
### def test_ihc_lotsamessages():
|
db64ca09e57da414d92888de1b52fade810d855e
|
handlers/downloadMapHandler.py
|
handlers/downloadMapHandler.py
|
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
self.set_status(302)
url = "http://m.zxq.co/{}.osz".format(bid)
#url = "https://bloodcat.com/osu/s/{}".format(bid)
self.add_header("location", url)
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
|
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
self.set_status(302, "Moved Temporarily")
url = "http://m.zxq.co/{}.osz".format(bid)
self.add_header("Location", url)
self.add_header("Cache-Control", "no-cache")
self.add_header("Pragma", "no-cache")
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
|
Add some headers in osu! direct download
|
Add some headers in osu! direct download
|
Python
|
agpl-3.0
|
osuripple/lets,osuripple/lets
|
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
- self.set_status(302)
+ self.set_status(302, "Moved Temporarily")
url = "http://m.zxq.co/{}.osz".format(bid)
- #url = "https://bloodcat.com/osu/s/{}".format(bid)
- self.add_header("location", url)
+ self.add_header("Location", url)
+ self.add_header("Cache-Control", "no-cache")
+ self.add_header("Pragma", "no-cache")
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
|
Add some headers in osu! direct download
|
## Code Before:
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
self.set_status(302)
url = "http://m.zxq.co/{}.osz".format(bid)
#url = "https://bloodcat.com/osu/s/{}".format(bid)
self.add_header("location", url)
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
## Instruction:
Add some headers in osu! direct download
## Code After:
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
self.set_status(302, "Moved Temporarily")
url = "http://m.zxq.co/{}.osz".format(bid)
self.add_header("Location", url)
self.add_header("Cache-Control", "no-cache")
self.add_header("Pragma", "no-cache")
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
|
from helpers import requestHelper
import requests
import glob
# Exception tracking
import tornado.web
import tornado.gen
import sys
import traceback
from raven.contrib.tornado import SentryMixin
MODULE_NAME = "direct_download"
class handler(SentryMixin, requestHelper.asyncRequestHandler):
"""
Handler for /d/
"""
@tornado.web.asynchronous
@tornado.gen.engine
def asyncGet(self, bid):
try:
- self.set_status(302)
+ self.set_status(302, "Moved Temporarily")
url = "http://m.zxq.co/{}.osz".format(bid)
- #url = "https://bloodcat.com/osu/s/{}".format(bid)
- self.add_header("location", url)
? ^
+ self.add_header("Location", url)
? ^
+ self.add_header("Cache-Control", "no-cache")
+ self.add_header("Pragma", "no-cache")
print(url)
#f = requests.get(url)
#self.write(str(f))
except:
log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc()))
if glob.sentry:
yield tornado.gen.Task(self.captureException, exc_info=True)
#finally:
# self.finish()
|
cd3929203e758367c3ded00a554f531aedb79f05
|
blaze/tests/test_blfuncs.py
|
blaze/tests/test_blfuncs.py
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
Fix usage of urlparse. and re-order list of key, value dict specification.
|
Fix usage of urlparse. and re-order list of key, value dict specification.
|
Python
|
bsd-3-clause
|
xlhtc007/blaze,AbhiAgarwal/blaze,mrocklin/blaze,aterrel/blaze,markflorisson/blaze-core,AbhiAgarwal/blaze,jcrist/blaze,markflorisson/blaze-core,mwiebe/blaze,AbhiAgarwal/blaze,markflorisson/blaze-core,maxalbert/blaze,alexmojaki/blaze,scls19fr/blaze,cpcloud/blaze,aterrel/blaze,mrocklin/blaze,jcrist/blaze,xlhtc007/blaze,FrancescAlted/blaze,nkhuyu/blaze,maxalbert/blaze,caseyclements/blaze,mwiebe/blaze,mwiebe/blaze,AbhiAgarwal/blaze,LiaoPan/blaze,aterrel/blaze,ChinaQuants/blaze,FrancescAlted/blaze,ContinuumIO/blaze,jdmcbr/blaze,mwiebe/blaze,dwillmer/blaze,FrancescAlted/blaze,cowlicks/blaze,ContinuumIO/blaze,dwillmer/blaze,LiaoPan/blaze,nkhuyu/blaze,scls19fr/blaze,FrancescAlted/blaze,markflorisson/blaze-core,alexmojaki/blaze,cpcloud/blaze,jdmcbr/blaze,ChinaQuants/blaze,caseyclements/blaze,cowlicks/blaze
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
- add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
+ add = BlazeFunc('add',[('f8(f8,f8)', _add),
- (_add, 'c16(c16,c16)')])
+ ('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
Fix usage of urlparse. and re-order list of key, value dict specification.
|
## Code Before:
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
(_add, 'c16(c16,c16)')])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
## Instruction:
Fix usage of urlparse. and re-order list of key, value dict specification.
## Code After:
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
add = BlazeFunc('add',[('f8(f8,f8)', _add),
('c16(c16,c16)', _add)])
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
from blaze.blfuncs import BlazeFunc
from blaze.datashape import double, complex128 as c128
from blaze.blaze_kernels import BlazeElementKernel
import blaze
def _add(a,b):
return a + b
def _mul(a,b):
return a * b
- add = BlazeFunc('add',[(_add, 'f8(f8,f8)'),
? ------
+ add = BlazeFunc('add',[('f8(f8,f8)', _add),
? ++++++
- (_add, 'c16(c16,c16)')])
? ------
+ ('c16(c16,c16)', _add)])
? ++++++
mul = BlazeFunc('mul', {(double,)*3: _mul})
a = blaze.array([1,2,3],dshape=double)
b = blaze.array([2,3,4],dshape=double)
c = add(a,b)
d = mul(c,c)
d._data = d._data.fuse()
|
04e5083006ee1faffbbdc73bd71b4601ff1db3ae
|
tests/workers/test_merge.py
|
tests/workers/test_merge.py
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
Test merge worker with commits and merges
|
test: Test merge worker with commits and merges
|
Python
|
apache-2.0
|
rowhit/gitfs,bussiere/gitfs,PressLabs/gitfs,PressLabs/gitfs,ksmaheshkumar/gitfs
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
+ def test_on_idle_with_commits_and_merges(self):
+ mocked_want_to_merge = MagicMock()
+ mocked_commit = MagicMock()
+
+ worker = MergeWorker("name", "email", "name", "email",
+ strategy="strategy",
+ want_to_merge=mocked_want_to_merge)
+ worker.commit = mocked_commit
+
+ commits, merges = worker.on_idle("commits", "merges")
+
+ mocked_commit.assert_called_once_with("commits")
+ assert mocked_want_to_merge.set.call_count == 1
+ assert commits == []
+ assert merges == []
+
|
Test merge worker with commits and merges
|
## Code Before:
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
## Instruction:
Test merge worker with commits and merges
## Code After:
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
def test_on_idle_with_commits_and_merges(self):
mocked_want_to_merge = MagicMock()
mocked_commit = MagicMock()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy",
want_to_merge=mocked_want_to_merge)
worker.commit = mocked_commit
commits, merges = worker.on_idle("commits", "merges")
mocked_commit.assert_called_once_with("commits")
assert mocked_want_to_merge.set.call_count == 1
assert commits == []
assert merges == []
|
import pytest
from mock import patch, MagicMock
from gitfs.worker.merge import MergeWorker
class TestMergeWorker(object):
def test_run(self):
mocked_queue = MagicMock()
mocked_idle = MagicMock(side_effect=ValueError)
mocked_queue.get.side_effect = ValueError()
worker = MergeWorker("name", "email", "name", "email",
strategy="strategy", merge_queue=mocked_queue)
worker.on_idle = mocked_idle
worker.timeout = 1
with pytest.raises(ValueError):
worker.run()
mocked_queue.get.assert_called_once_with(timeout=1, block=True)
mocked_idle.assert_called_once_with([], [])
+
+ def test_on_idle_with_commits_and_merges(self):
+ mocked_want_to_merge = MagicMock()
+ mocked_commit = MagicMock()
+
+ worker = MergeWorker("name", "email", "name", "email",
+ strategy="strategy",
+ want_to_merge=mocked_want_to_merge)
+ worker.commit = mocked_commit
+
+ commits, merges = worker.on_idle("commits", "merges")
+
+ mocked_commit.assert_called_once_with("commits")
+ assert mocked_want_to_merge.set.call_count == 1
+ assert commits == []
+ assert merges == []
|
bf0990f1e5dda5e78c859dd625638357da5b1ef4
|
sir/schema/modelext.py
|
sir/schema/modelext.py
|
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
|
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
|
Add a backref from Link to LinkAttribute
|
Add a backref from Link to LinkAttribute
|
Python
|
mit
|
jeffweeksio/sir
|
- from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
+ from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
+
+ class CustomLinkAttribute(LinkAttribute):
+ link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
+ backref="attributes")
+
|
Add a backref from Link to LinkAttribute
|
## Code Before:
from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
## Instruction:
Add a backref from Link to LinkAttribute
## Code After:
from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
class CustomLinkAttribute(LinkAttribute):
link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
backref="attributes")
|
- from mbdata.models import Area, Artist, Label, Recording, ReleaseGroup, Work
+ from mbdata.models import Area, Artist, Label, LinkAttribute, Recording, ReleaseGroup, Work
? +++++++++++++++
from sqlalchemy import exc as sa_exc
from sqlalchemy.orm import relationship
from warnings import simplefilter
# Ignore SQLAlchemys warnings that we're overriding some attributes
simplefilter(action="ignore", category=sa_exc.SAWarning)
class CustomArea(Area):
aliases = relationship("AreaAlias")
class CustomArtist(Artist):
aliases = relationship("ArtistAlias")
area = relationship('CustomArea', foreign_keys=[Artist.area_id])
begin_area = relationship('CustomArea', foreign_keys=[Artist.begin_area_id])
end_area = relationship('CustomArea', foreign_keys=[Artist.end_area_id])
class CustomLabel(Label):
aliases = relationship("LabelAlias")
class CustomRecording(Recording):
tracks = relationship("Track")
class CustomReleaseGroup(ReleaseGroup):
releases = relationship("Release")
class CustomWork(Work):
aliases = relationship("WorkAlias")
artist_links = relationship("LinkArtistWork")
+
+
+ class CustomLinkAttribute(LinkAttribute):
+ link = relationship('Link', foreign_keys=[LinkAttribute.link_id], innerjoin=True,
+ backref="attributes")
|
fe37ef9248f8658296e6f465d380d639d6047a5d
|
aspen/server/diesel_.py
|
aspen/server/diesel_.py
|
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
app = None # a diesel app instance
def bind(self):
self.app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.app.run()
def stop(self):
try:
self.app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.app.add_loop(diesel.Loop(loop))
|
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
diesel_app = None # a diesel diesel_app instance
def bind(self):
self.diesel_app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.diesel_app.run()
def stop(self):
try:
if self.diesel_app is not None:
self.diesel_app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.diesel_app.add_loop(diesel.Loop(loop))
|
Reduce log spam from diesel.
|
Reduce log spam from diesel.
|
Python
|
mit
|
gratipay/aspen.py,gratipay/aspen.py
|
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
- app = None # a diesel app instance
+ diesel_app = None # a diesel diesel_app instance
def bind(self):
- self.app = wsgi.WSGIApplication( self.website
+ self.diesel_app = wsgi.WSGIApplication( self.website
- , self.website.address[1]
+ , self.website.address[1]
- , self.website.address[0]
+ , self.website.address[0]
- )
+ )
def start(self):
- self.app.run()
+ self.diesel_app.run()
def stop(self):
try:
+ if self.diesel_app is not None:
- self.app.halt()
+ self.diesel_app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
- self.app.add_loop(diesel.Loop(loop))
+ self.diesel_app.add_loop(diesel.Loop(loop))
|
Reduce log spam from diesel.
|
## Code Before:
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
app = None # a diesel app instance
def bind(self):
self.app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.app.run()
def stop(self):
try:
self.app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.app.add_loop(diesel.Loop(loop))
## Instruction:
Reduce log spam from diesel.
## Code After:
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
diesel_app = None # a diesel diesel_app instance
def bind(self):
self.diesel_app = wsgi.WSGIApplication( self.website
, self.website.address[1]
, self.website.address[0]
)
def start(self):
self.diesel_app.run()
def stop(self):
try:
if self.diesel_app is not None:
self.diesel_app.halt()
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
self.diesel_app.add_loop(diesel.Loop(loop))
|
import diesel
from aspen.server import BaseEngine
from diesel.protocols import wsgi
class Engine(BaseEngine):
- app = None # a diesel app instance
+ diesel_app = None # a diesel diesel_app instance
? +++++++ +++++++
def bind(self):
- self.app = wsgi.WSGIApplication( self.website
+ self.diesel_app = wsgi.WSGIApplication( self.website
? +++++++
- , self.website.address[1]
+ , self.website.address[1]
? +++++++
- , self.website.address[0]
+ , self.website.address[0]
? +++++++
- )
+ )
? +++++++
def start(self):
- self.app.run()
+ self.diesel_app.run()
? +++++++
def stop(self):
try:
+ if self.diesel_app is not None:
- self.app.halt()
+ self.diesel_app.halt()
? ++++ +++++++
except diesel.app.ApplicationEnd:
pass # Only you can prevent log spam.
def start_restarter(self, check_all):
def loop():
while True:
check_all()
diesel.sleep(0.5)
- self.app.add_loop(diesel.Loop(loop))
+ self.diesel_app.add_loop(diesel.Loop(loop))
? +++++++
|
3ced8676d474df3149bf78519e918cfa3b6b0ec3
|
src/dal_gm2m/fields.py
|
src/dal_gm2m/fields.py
|
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
"""GM2MField ror FutureModelForm."""
def value_from_object(self, instance, name):
"""Return the list of objects in the GM2MField relation."""
return None if not instance.pk else [
x for x in getattr(instance, name).all()]
def save_relation_data(self, instance, name, value):
"""Save the relation into the GM2MField."""
setattr(instance, name, value)
|
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
"""GM2MField ror FutureModelForm."""
def value_from_object(self, instance, name):
"""Return the list of objects in the GM2MField relation."""
return None if not instance.pk else [
getattr(x, 'gm2m_tgt', x)
for x in getattr(instance, name).all()
]
def save_relation_data(self, instance, name, value):
"""Save the relation into the GM2MField."""
setattr(instance, name, value)
|
Support new versions with django-gm2m
|
Support new versions with django-gm2m
|
Python
|
mit
|
yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light
|
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
"""GM2MField ror FutureModelForm."""
def value_from_object(self, instance, name):
"""Return the list of objects in the GM2MField relation."""
return None if not instance.pk else [
+ getattr(x, 'gm2m_tgt', x)
- x for x in getattr(instance, name).all()]
+ for x in getattr(instance, name).all()
+ ]
def save_relation_data(self, instance, name, value):
"""Save the relation into the GM2MField."""
setattr(instance, name, value)
|
Support new versions with django-gm2m
|
## Code Before:
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
"""GM2MField ror FutureModelForm."""
def value_from_object(self, instance, name):
"""Return the list of objects in the GM2MField relation."""
return None if not instance.pk else [
x for x in getattr(instance, name).all()]
def save_relation_data(self, instance, name, value):
"""Save the relation into the GM2MField."""
setattr(instance, name, value)
## Instruction:
Support new versions with django-gm2m
## Code After:
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
    """GM2MField for FutureModelForm."""

    def value_from_object(self, instance, name):
        """Return the list of objects in the GM2MField relation."""
        if not instance.pk:
            return None
        related = getattr(instance, name).all()
        # django-gm2m >= some versions wrap targets; unwrap when present.
        return [getattr(item, 'gm2m_tgt', item) for item in related]

    def save_relation_data(self, instance, name, value):
        """Save the relation into the GM2MField."""
        setattr(instance, name, value)
|
"""GM2MField support for autocomplete fields."""
class GM2MFieldMixin(object):
"""GM2MField ror FutureModelForm."""
def value_from_object(self, instance, name):
"""Return the list of objects in the GM2MField relation."""
return None if not instance.pk else [
+ getattr(x, 'gm2m_tgt', x)
- x for x in getattr(instance, name).all()]
? -- -
+ for x in getattr(instance, name).all()
+ ]
def save_relation_data(self, instance, name, value):
"""Save the relation into the GM2MField."""
setattr(instance, name, value)
|
c0512873d1f558768c174c64faf419e03b63e24b
|
pijobs/flashjob.py
|
pijobs/flashjob.py
|
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
|
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
def default_options(self):
return {
'loop': 5,
'brightness': 10,
'interval': 0.2,
}
|
Add default options for FlashJob.
|
Add default options for FlashJob.
|
Python
|
mit
|
ollej/piapi,ollej/piapi
|
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
+ def default_options(self):
+ return {
+ 'loop': 5,
+ 'brightness': 10,
+ 'interval': 0.2,
+ }
|
Add default options for FlashJob.
|
## Code Before:
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
## Instruction:
Add default options for FlashJob.
## Code After:
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
    """Job that flashes the whole scrollphat display on and off."""

    def run(self):
        """Alternate between an all-on and an all-off display, then finish."""
        scrollphat.clear()
        flash_count = int(self.options['loop'])
        for _ in range(flash_count):
            # Light every pixel, wait, blank the display, wait again.
            scrollphat.set_pixels(lambda x, y: True, True)
            self.sleep_interval()
            scrollphat.clear()
            self.sleep_interval()
        self.sleep()

    def default_options(self):
        """Return the built-in option defaults for a flash job."""
        return {
            'loop': 5,
            'brightness': 10,
            'interval': 0.2,
        }
|
import scrollphat
from pijobs.scrollphatjob import ScrollphatJob
class FlashJob(ScrollphatJob):
def run(self):
scrollphat.clear()
for i in range(int(self.options['loop'])):
scrollphat.set_pixels(lambda x, y: True, True)
self.sleep_interval()
scrollphat.clear()
self.sleep_interval()
self.sleep()
+ def default_options(self):
+ return {
+ 'loop': 5,
+ 'brightness': 10,
+ 'interval': 0.2,
+ }
|
97696fafb6ce556781c02a130ae5f0e610c9bf45
|
test/selenium/src/lib/file_ops.py
|
test/selenium/src/lib/file_ops.py
|
import os
import logging
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
for file_name in os.listdir(path):
os.remove(path + os.sep + file_name)
|
import logging
import os
import shutil
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
"""Add numeric postfix for file."""
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
"""Remove all files and sub-dir in provided path."""
shutil.rmtree(path)
|
Delete sub folders in log directory
|
Delete sub folders in log directory
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,plamut/ggrc-core
|
+ import logging
import os
+ import shutil
- import logging
-
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
+ """Add numeric postfix for file."""
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
- for file_name in os.listdir(path):
- os.remove(path + os.sep + file_name)
+ """Remove all files and sub-dir in provided path."""
+ shutil.rmtree(path)
|
Delete sub folders in log directory
|
## Code Before:
import os
import logging
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
for file_name in os.listdir(path):
os.remove(path + os.sep + file_name)
## Instruction:
Delete sub folders in log directory
## Code After:
import logging
import os
import shutil
logger = logging.getLogger(__name__)
def create_directory(path):
  """Create the directory for *path* if it does not already exist.

  *path* may be either a directory path (ending with ``os.sep``) or a
  file path, in which case only its directory component is created.
  """
  # A trailing os.sep marks a directory path; otherwise strip the file name.
  if path[-1] != os.sep:
    path = os.path.split(path)[0]
  if os.path.exists(path):
    return
  logger.info("Creating directory: %s", path)
  os.makedirs(path)
def get_unique_postfix(file_path, extension):
  """Return *file_path* + numeric postfix + *extension* that is not taken.

  Postfixes are tried in order 0, 1, 2, ... until a non-existing file
  name is found.
  """
  counter = 0
  while True:
    candidate = file_path + str(counter) + extension
    if not os.path.isfile(candidate):
      return candidate
    counter += 1
def delete_directory_contents(path):
  """Remove all files and sub-dir in provided path.

  NOTE(review): ``shutil.rmtree`` removes *path* itself as well as its
  contents, despite what the function name suggests -- callers that
  expect the (now empty) directory to remain must recreate it.
  """
  shutil.rmtree(path)
|
+ import logging
import os
+ import shutil
- import logging
-
logger = logging.getLogger(__name__)
def create_directory(path):
"""
Creates a directory if it doesn't already exist.
"""
# Check if path is a file_path or a dir_path. Dir path is a string that
# ends with os.sep
if path[-1] != os.sep:
path, file_name = os.path.split(path)
if not os.path.exists(path):
logger.info("Creating directory: %s", path)
os.makedirs(path)
def get_unique_postfix(file_path, extension):
+ """Add numeric postfix for file."""
postfix = 0
new_path = file_path + str(postfix) + extension
while os.path.isfile(new_path):
postfix += 1
new_path = file_path + str(postfix) + extension
return new_path
def delete_directory_contents(path):
- for file_name in os.listdir(path):
- os.remove(path + os.sep + file_name)
+ """Remove all files and sub-dir in provided path."""
+ shutil.rmtree(path)
|
e85bfff982d14b556d2cab5d3b5535c37333cc3e
|
normandy/control/views.py
|
normandy/control/views.py
|
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
template_name = 'control/index.html'
context_object_name = 'all_recipes_list'
login_url = '/control/login/'
def get_queryset(self):
return Recipe.objects.order_by('-start_time')[:5]
class EditView(LoginRequiredMixin, generic.DetailView):
model = Recipe
template_name = 'control/edit_recipe.html'
login_url = '/control/login/'
|
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
template_name = 'control/index.html'
context_object_name = 'all_recipes_list'
login_url = '/control/login/'
def get_queryset(self):
return Recipe.objects.all()
class EditView(LoginRequiredMixin, generic.DetailView):
model = Recipe
template_name = 'control/edit_recipe.html'
login_url = '/control/login/'
|
Remove limit on recipe list object
|
Remove limit on recipe list object
|
Python
|
mpl-2.0
|
Osmose/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy
|
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
template_name = 'control/index.html'
context_object_name = 'all_recipes_list'
login_url = '/control/login/'
def get_queryset(self):
- return Recipe.objects.order_by('-start_time')[:5]
+ return Recipe.objects.all()
class EditView(LoginRequiredMixin, generic.DetailView):
model = Recipe
template_name = 'control/edit_recipe.html'
login_url = '/control/login/'
|
Remove limit on recipe list object
|
## Code Before:
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
template_name = 'control/index.html'
context_object_name = 'all_recipes_list'
login_url = '/control/login/'
def get_queryset(self):
return Recipe.objects.order_by('-start_time')[:5]
class EditView(LoginRequiredMixin, generic.DetailView):
model = Recipe
template_name = 'control/edit_recipe.html'
login_url = '/control/login/'
## Instruction:
Remove limit on recipe list object
## Code After:
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
    """Control-panel landing page listing every recipe.

    Anonymous visitors are redirected to ``login_url``.
    """
    template_name = 'control/index.html'
    context_object_name = 'all_recipes_list'
    login_url = '/control/login/'
    def get_queryset(self):
        """Return all recipes with no ordering or limit applied."""
        return Recipe.objects.all()
class EditView(LoginRequiredMixin, generic.DetailView):
    """Detail page used to edit a single recipe; requires login."""
    model = Recipe
    template_name = 'control/edit_recipe.html'
    login_url = '/control/login/'
|
from django.views import generic
from django.contrib.auth.mixins import LoginRequiredMixin
from normandy.recipes.models import Recipe
class IndexView(LoginRequiredMixin, generic.ListView):
template_name = 'control/index.html'
context_object_name = 'all_recipes_list'
login_url = '/control/login/'
def get_queryset(self):
- return Recipe.objects.order_by('-start_time')[:5]
+ return Recipe.objects.all()
class EditView(LoginRequiredMixin, generic.DetailView):
model = Recipe
template_name = 'control/edit_recipe.html'
login_url = '/control/login/'
|
fedf78926b7c135f0f86934975a2b70aa1256884
|
app/models.py
|
app/models.py
|
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from . import db
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
username = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
is_admin = db.Column(db.Boolean)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default = datetime.utcnow)
@property
def password(self):
raise AttributeError('Password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
|
from datetime import datetime
from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from . import db, login_manager
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
username = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
is_admin = db.Column(db.Boolean)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default = datetime.utcnow)
@property
def password(self):
raise AttributeError('Password is not a readable attribute')
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
|
Add user_loader function for loading a user
|
Add user_loader function for loading a user
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
from datetime import datetime
+
+ from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
- from . import db
+ from . import db, login_manager
- class User(db.Model):
+ class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
username = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
is_admin = db.Column(db.Boolean)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default = datetime.utcnow)
@property
def password(self):
raise AttributeError('Password is not a readable attribute')
+
+ @login_manager.user_loader
+ def load_user(user_id):
+ return User.query.get(int(user_id))
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
|
Add user_loader function for loading a user
|
## Code Before:
from datetime import datetime
from werkzeug.security import generate_password_hash, check_password_hash
from . import db
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
username = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
is_admin = db.Column(db.Boolean)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default = datetime.utcnow)
@property
def password(self):
raise AttributeError('Password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
## Instruction:
Add user_loader function for loading a user
## Code After:
from datetime import datetime
from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from . import db, login_manager
class User(UserMixin, db.Model):
  """A registered user account backed by the ``users`` table.

  ``UserMixin`` supplies the standard Flask-Login account methods
  (``is_authenticated``, ``get_id``, ...). Passwords are stored only as
  a werkzeug hash; the plain password is write-only.
  """

  __tablename__ = 'users'

  id = db.Column(db.Integer, primary_key=True, autoincrement=True)
  email = db.Column(db.String(64), nullable=False, unique=True, index=True)
  username = db.Column(db.String(64), nullable=False, unique=True, index=True)
  is_admin = db.Column(db.Boolean)
  password_hash = db.Column(db.String(128))
  name = db.Column(db.String(64))
  member_since = db.Column(db.DateTime(), default=datetime.utcnow)

  @property
  def password(self):
    """Reading the password is forbidden; only the hash is kept."""
    raise AttributeError('Password is not a readable attribute')

  @login_manager.user_loader
  def load_user(user_id):
    # NOTE(review): defined inside the class body, as in the original.
    # Flask-Login registers the plain function, so it still receives
    # user_id as its first argument; conventionally this callback lives
    # at module level -- confirm before moving.
    return User.query.get(int(user_id))

  @password.setter
  def password(self, password):
    """Hash *password* and store the digest."""
    self.password_hash = generate_password_hash(password)

  def verify_password(self, password):
    """Return True if *password* matches the stored hash."""
    return check_password_hash(self.password_hash, password)
|
from datetime import datetime
+
+ from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
- from . import db
+ from . import db, login_manager
- class User(db.Model):
+ class User(UserMixin, db.Model):
? +++++++++++
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
username = db.Column(db.String(64),
nullable=False,
unique=True,
index=True)
is_admin = db.Column(db.Boolean)
password_hash = db.Column(db.String(128))
name = db.Column(db.String(64))
member_since = db.Column(db.DateTime(), default = datetime.utcnow)
@property
def password(self):
raise AttributeError('Password is not a readable attribute')
+
+ @login_manager.user_loader
+ def load_user(user_id):
+ return User.query.get(int(user_id))
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
|
5e7cce09a6e6a847dad1714973fddb53d60c4c3f
|
yawf_sample/simple/models.py
|
yawf_sample/simple/models.py
|
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
MINIMIZED = 'minimized'
MAXIMIZED = 'maximized'
NORMAL = 'normal'
types = (MINIMIZED, MAXIMIZED, NORMAL)
choices = zip(types, types)
@reversion.register
class Window(RevisionModelMixin, models.Model):
title = models.CharField(max_length=255)
width = models.IntegerField()
height = models.IntegerField()
workflow_type = 'simple'
open_status = models.CharField(
max_length=32,
choices=WINDOW_OPEN_STATUS.choices,
default='init',
editable=False)
|
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
MINIMIZED = 'minimized'
MAXIMIZED = 'maximized'
NORMAL = 'normal'
types = (MINIMIZED, MAXIMIZED, NORMAL)
choices = zip(types, types)
class Window(RevisionModelMixin, models.Model):
title = models.CharField(max_length=255)
width = models.IntegerField()
height = models.IntegerField()
workflow_type = 'simple'
open_status = models.CharField(
max_length=32,
choices=WINDOW_OPEN_STATUS.choices,
default='init',
editable=False)
reversion.register(Window)
|
Fix reversion register in sample app
|
Fix reversion register in sample app
|
Python
|
mit
|
freevoid/yawf
|
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
MINIMIZED = 'minimized'
MAXIMIZED = 'maximized'
NORMAL = 'normal'
types = (MINIMIZED, MAXIMIZED, NORMAL)
choices = zip(types, types)
- @reversion.register
class Window(RevisionModelMixin, models.Model):
title = models.CharField(max_length=255)
width = models.IntegerField()
height = models.IntegerField()
workflow_type = 'simple'
open_status = models.CharField(
max_length=32,
choices=WINDOW_OPEN_STATUS.choices,
default='init',
editable=False)
+ reversion.register(Window)
+
|
Fix reversion register in sample app
|
## Code Before:
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
MINIMIZED = 'minimized'
MAXIMIZED = 'maximized'
NORMAL = 'normal'
types = (MINIMIZED, MAXIMIZED, NORMAL)
choices = zip(types, types)
@reversion.register
class Window(RevisionModelMixin, models.Model):
title = models.CharField(max_length=255)
width = models.IntegerField()
height = models.IntegerField()
workflow_type = 'simple'
open_status = models.CharField(
max_length=32,
choices=WINDOW_OPEN_STATUS.choices,
default='init',
editable=False)
## Instruction:
Fix reversion register in sample app
## Code After:
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
    """Enumeration of window open states used by ``Window.open_status``."""
    MINIMIZED = 'minimized'
    MAXIMIZED = 'maximized'
    NORMAL = 'normal'
    # All known states, and the (value, label) pairs a Django CharField
    # expects for its ``choices`` option.
    types = (MINIMIZED, MAXIMIZED, NORMAL)
    # NOTE(review): under Python 3, ``zip`` returns a one-shot iterator,
    # so ``choices`` can only be consumed once -- confirm the target
    # runtime, or wrap in ``list(...)`` if Python 3 is used.
    choices = zip(types, types)
class Window(RevisionModelMixin, models.Model):
    """A window whose changes are version-tracked via django-reversion."""
    title = models.CharField(max_length=255)
    width = models.IntegerField()
    height = models.IntegerField()
    # Identifier of the yawf workflow this model participates in.
    workflow_type = 'simple'
    # NOTE(review): the default 'init' is not one of
    # WINDOW_OPEN_STATUS.choices -- confirm an out-of-choices initial
    # state is intended.
    open_status = models.CharField(
        max_length=32,
        choices=WINDOW_OPEN_STATUS.choices,
        default='init',
        editable=False)
# Register the model with django-reversion so revisions are recorded.
reversion.register(Window)
|
from django.db import models
import reversion
from yawf.revision import RevisionModelMixin
class WINDOW_OPEN_STATUS:
MINIMIZED = 'minimized'
MAXIMIZED = 'maximized'
NORMAL = 'normal'
types = (MINIMIZED, MAXIMIZED, NORMAL)
choices = zip(types, types)
- @reversion.register
class Window(RevisionModelMixin, models.Model):
title = models.CharField(max_length=255)
width = models.IntegerField()
height = models.IntegerField()
workflow_type = 'simple'
open_status = models.CharField(
max_length=32,
choices=WINDOW_OPEN_STATUS.choices,
default='init',
editable=False)
+
+ reversion.register(Window)
|
c52edc120f38acb079fa364cdb684fc2052d4727
|
corehq/messaging/smsbackends/trumpia/urls.py
|
corehq/messaging/smsbackends/trumpia/urls.py
|
from django.conf.urls import url
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
urlpatterns = [
url(r'^sms/(?P<api_key>[\w-]+)/?$', TrumpiaIncomingView.as_view(),
name=TrumpiaIncomingView.urlname),
]
|
from django.conf.urls import url
from corehq.apps.hqwebapp.decorators import waf_allow
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
urlpatterns = [
url(r'^sms/(?P<api_key>[\w-]+)/?$', waf_allow('XSS_QUERYSTRING')(TrumpiaIncomingView.as_view()),
name=TrumpiaIncomingView.urlname),
]
|
Annotate trumpia url to say it allows XML in the querystring
|
Annotate trumpia url to say it allows XML in the querystring
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from django.conf.urls import url
+
+ from corehq.apps.hqwebapp.decorators import waf_allow
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
urlpatterns = [
- url(r'^sms/(?P<api_key>[\w-]+)/?$', TrumpiaIncomingView.as_view(),
+ url(r'^sms/(?P<api_key>[\w-]+)/?$', waf_allow('XSS_QUERYSTRING')(TrumpiaIncomingView.as_view()),
name=TrumpiaIncomingView.urlname),
]
|
Annotate trumpia url to say it allows XML in the querystring
|
## Code Before:
from django.conf.urls import url
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
urlpatterns = [
url(r'^sms/(?P<api_key>[\w-]+)/?$', TrumpiaIncomingView.as_view(),
name=TrumpiaIncomingView.urlname),
]
## Instruction:
Annotate trumpia url to say it allows XML in the querystring
## Code After:
from django.conf.urls import url
from corehq.apps.hqwebapp.decorators import waf_allow
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
# Inbound SMS endpoint for a Trumpia backend identified by api_key.
# waf_allow('XSS_QUERYSTRING') annotates the view as allowed to receive
# XML-like content in the query string so the WAF does not reject
# Trumpia's callbacks.
urlpatterns = [
    url(r'^sms/(?P<api_key>[\w-]+)/?$', waf_allow('XSS_QUERYSTRING')(TrumpiaIncomingView.as_view()),
        name=TrumpiaIncomingView.urlname),
]
|
from django.conf.urls import url
+
+ from corehq.apps.hqwebapp.decorators import waf_allow
from corehq.messaging.smsbackends.trumpia.views import TrumpiaIncomingView
urlpatterns = [
- url(r'^sms/(?P<api_key>[\w-]+)/?$', TrumpiaIncomingView.as_view(),
+ url(r'^sms/(?P<api_key>[\w-]+)/?$', waf_allow('XSS_QUERYSTRING')(TrumpiaIncomingView.as_view()),
? +++++++++++++++++++++++++++++ +
name=TrumpiaIncomingView.urlname),
]
|
824c46b7d3953e1933a72def4edf058a577487ea
|
byceps/services/attendance/transfer/models.py
|
byceps/services/attendance/transfer/models.py
|
from attr import attrib, attrs
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@attrs(slots=True) # Not yet frozen b/c models are not immutable.
class Attendee:
user = attrib(type=User)
seat = attrib(type=Seat)
checked_in = attrib(type=bool)
|
from dataclasses import dataclass
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@dataclass # Not yet frozen b/c models are not immutable.
class Attendee:
user: User
seat: Seat
checked_in: bool
|
Use `dataclass` instead of `attr` for attendance model
|
Use `dataclass` instead of `attr` for attendance model
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
- from attr import attrib, attrs
+ from dataclasses import dataclass
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
- @attrs(slots=True) # Not yet frozen b/c models are not immutable.
+ @dataclass # Not yet frozen b/c models are not immutable.
class Attendee:
- user = attrib(type=User)
- seat = attrib(type=Seat)
- checked_in = attrib(type=bool)
+ user: User
+ seat: Seat
+ checked_in: bool
|
Use `dataclass` instead of `attr` for attendance model
|
## Code Before:
from attr import attrib, attrs
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@attrs(slots=True) # Not yet frozen b/c models are not immutable.
class Attendee:
user = attrib(type=User)
seat = attrib(type=Seat)
checked_in = attrib(type=bool)
## Instruction:
Use `dataclass` instead of `attr` for attendance model
## Code After:
from dataclasses import dataclass
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
@dataclass # Not yet frozen b/c models are not immutable.
class Attendee:
    """A user's attendance: the seat they occupy and check-in state."""
    user: User
    seat: Seat
    checked_in: bool
|
- from attr import attrib, attrs
+ from dataclasses import dataclass
from ....services.seating.models.seat import Seat
from ....services.user.models.user import User
- @attrs(slots=True) # Not yet frozen b/c models are not immutable.
? ^^ - ----------
+ @dataclass # Not yet frozen b/c models are not immutable.
? + ^^^^
class Attendee:
- user = attrib(type=User)
- seat = attrib(type=Seat)
- checked_in = attrib(type=bool)
+ user: User
+ seat: Seat
+ checked_in: bool
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.