Dataset columns and value statistics:

| column | values |
|---|---|
| commit | stringlengths 40 to 40 |
| old_file | stringlengths 4 to 106 |
| new_file | stringlengths 4 to 106 |
| old_contents | stringlengths 10 to 2.94k |
| new_contents | stringlengths 21 to 2.95k |
| subject | stringlengths 16 to 444 |
| message | stringlengths 17 to 2.63k |
| lang | stringclasses, 1 value |
| license | stringclasses, 13 values |
| repos | stringlengths 7 to 43k |
| ndiff | stringlengths 52 to 3.31k |
| instruction | stringlengths 16 to 444 |
| content | stringlengths 133 to 4.32k |
| diff | stringlengths 49 to 3.61k |
4298e82a3dc4c6577b41b4acbb73ff7bb5795002
|
src/django_registration/backends/one_step/views.py
|
src/django_registration/backends/one_step/views.py
|
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
Make the one-step backend a little more robust with custom users.
|
Make the one-step backend a little more robust with custom users.
|
Python
|
bsd-3-clause
|
ubernostrum/django-registration
|
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
- new_user = authenticate(
+ new_user = authenticate(**{
- username=getattr(new_user, User.USERNAME_FIELD),
+ User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
- password=form.cleaned_data['password1']
+ 'password': form.cleaned_data['password1']
- )
+ })
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
Make the one-step backend a little more robust with custom users.
|
## Code Before:
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(
username=getattr(new_user, User.USERNAME_FIELD),
password=form.cleaned_data['password1']
)
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
## Instruction:
Make the one-step backend a little more robust with custom users.
## Code After:
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
new_user = authenticate(**{
User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
'password': form.cleaned_data['password1']
})
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
from django.contrib.auth import authenticate, get_user_model, login
from django.urls import reverse_lazy
from django_registration import signals
from django_registration.views import RegistrationView as BaseRegistrationView
User = get_user_model()
class RegistrationView(BaseRegistrationView):
"""
Registration via the simplest possible process: a user supplies a
username, email address and password (the bare minimum for a
useful account), and is immediately signed up and logged in.
"""
success_url = reverse_lazy('registration_complete')
def register(self, form):
new_user = form.save()
- new_user = authenticate(
+ new_user = authenticate(**{
? +++
- username=getattr(new_user, User.USERNAME_FIELD),
? ^ ^^^^^
+ User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
? ^ ^^^^^^^^^^^^^^^^^
- password=form.cleaned_data['password1']
? ^
+ 'password': form.cleaned_data['password1']
? + ^^^
- )
+ })
? +
login(self.request, new_user)
signals.user_registered.send(
sender=self.__class__,
user=new_user,
request=self.request
)
return new_user
|
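The change above matters when a project swaps in a custom user model whose `USERNAME_FIELD` is not `"username"`. Below is a minimal sketch of the same idea outside any view; the helper name and the credential dict are illustrative assumptions, not part of django-registration.

```python
from django.contrib.auth import authenticate, get_user_model

User = get_user_model()

def authenticate_new_user(new_user, raw_password):
    # Build the credential keyword dynamically so authenticate() receives
    # whatever field name the active user model declares (e.g. "email"),
    # instead of hard-coding username=... as the old code did.
    credentials = {
        User.USERNAME_FIELD: getattr(new_user, User.USERNAME_FIELD),
        "password": raw_password,
    }
    return authenticate(**credentials)
```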
48d0dc98fd859ea1d8cf25370fe0be9ac1350448
|
selftest/subdir/proc.py
|
selftest/subdir/proc.py
|
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
Mark broken pipe test with expected failure
|
Mark broken pipe test with expected failure
|
Python
|
bsd-2-clause
|
depp/idiotest,depp/idiotest
|
- @test
+ @test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
+ fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
Mark broken pipe test with expected failure
|
## Code Before:
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
## Instruction:
Mark broken pipe test with expected failure
## Code After:
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
- @test
+ @test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
+ fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
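For readers more familiar with pytest, roughly the same expected-failure idea (a test that is allowed to fail, plus an explicit fail() when the expected breakage never happens) looks like the sketch below. Only the `./nostdin` binary and the reason string come from the testcase above; the decorator and helpers are a pytest analogue, not idiotest's API.

```python
import subprocess

import pytest

@pytest.mark.xfail(reason="the pipe may legitimately break when stdin is unavailable")
def test_nostdin_broken_pipe():
    # Run the fixture binary the way the selftest does, feeding it some input.
    result = subprocess.run(["./nostdin"], input=b"Test", capture_output=True)
    if result.returncode == 0:
        # Mirror the selftest's explicit marker for the "didn't break" case.
        pytest.fail("The pipe didn't break, but that's okay")
```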
d76398b40844e969439d495d4ea3604e5b2011b4
|
mock-recipe-server/test_mock_server.py
|
mock-recipe-server/test_mock_server.py
|
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
Handle error testcases in mock-server tests.
|
Handle error testcases in mock-server tests.
|
Python
|
mpl-2.0
|
Osmose/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy,mozilla/normandy,Osmose/normandy,Osmose/normandy,mozilla/normandy
|
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
+ try:
- recipe_data = recipe_path.read()
+ recipe_data = recipe_path.read()
+ signed_recipe_data = recipe_path.add('signed').read()
+ except FileNotFoundError:
+ # Some error testcases are purposefully missing files,
+ # so we just skip checking those.
+ continue
+
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
- signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
Handle error testcases in mock-server tests.
|
## Code Before:
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
recipe_data = recipe_path.read()
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
## Instruction:
Handle error testcases in mock-server tests.
## Code After:
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
try:
recipe_data = recipe_path.read()
signed_recipe_data = recipe_path.add('signed').read()
except FileNotFoundError:
# Some error testcases are purposefully missing files,
# so we just skip checking those.
continue
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
from utils import APIPath
def test_testcase_difference(root_path):
"""Ensure that different testcases output different data."""
recipes = set()
testcase_paths = (
APIPath(path, 'http://example.com')
for path in root_path.path.iterdir() if path.is_dir()
)
for testcase_path in testcase_paths:
recipe_path = testcase_path.add('api', 'v1', 'recipe')
+ try:
- recipe_data = recipe_path.read()
+ recipe_data = recipe_path.read()
? ++++
+ signed_recipe_data = recipe_path.add('signed').read()
+ except FileNotFoundError:
+ # Some error testcases are purposefully missing files,
+ # so we just skip checking those.
+ continue
+
assert recipe_data not in recipes
recipes.add(recipe_data)
# This asserts both that testcases have differing signed data
# and that a single testcase does not have the same data for
# signed and unsigned endpoints.
- signed_recipe_data = recipe_path.add('signed').read()
assert signed_recipe_data not in recipes
recipes.add(signed_recipe_data)
|
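The try/except-continue pattern above is easy to reuse outside this test suite. A standalone sketch follows; the file names and directory layout are hypothetical, not the mock server's actual structure.

```python
from pathlib import Path

def collect_testcase_data(root: Path):
    """Yield (recipe, signed) payloads per testcase directory, skipping
    directories that intentionally omit one of the files."""
    for case_dir in (p for p in root.iterdir() if p.is_dir()):
        try:
            recipe = (case_dir / "recipe.json").read_text()
            signed = (case_dir / "recipe_signed.json").read_text()
        except FileNotFoundError:
            # Error testcases are purposefully missing files; skip them.
            continue
        yield recipe, signed
```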
4e243ade9b96c5ea6e68c27593fb578c52c85f1a
|
huffman.py
|
huffman.py
|
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
|
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
def setRoot(self, root):
self.root = root
def setLeft(self, left):
self.left = left
def setRight(self, right):
self.right = right
def addCode(self, code):
self.code = code + self.code
|
Add functions about setting the parent & children nodes and codes.
|
Add functions about setting the parent & children nodes and codes.
|
Python
|
mit
|
hane1818/Algorithm_HW3_huffman_code
|
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
+ def setRoot(self, root):
+ self.root = root
+
+ def setLeft(self, left):
+ self.left = left
+
+ def setRight(self, right):
+ self.right = right
+
+ def addCode(self, code):
+ self.code = code + self.code
+
|
Add functions about setting the parent & children nodes and codes.
|
## Code Before:
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
## Instruction:
Add functions about setting the parent & children nodes and codes.
## Code After:
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
def setRoot(self, root):
self.root = root
def setLeft(self, left):
self.left = left
def setRight(self, right):
self.right = right
def addCode(self, code):
self.code = code + self.code
|
class Node:
def __init__(self):
self.name = ''
self.weight = 0
self.code = ''
def initSet(self, name, weight):
self.name = name
self.weight = weight
+
+ def setRoot(self, root):
+ self.root = root
+
+ def setLeft(self, left):
+ self.left = left
+
+ def setRight(self, right):
+ self.right = right
+
+ def addCode(self, code):
+ self.code = code + self.code
|
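A short usage sketch of the new helpers: two leaves are attached under a parent and each gets a code bit prepended, the way repeated merges build up full Huffman codes. The symbols and weights below are made up for illustration.

```python
left, right, parent = Node(), Node(), Node()
left.initSet('a', 5)
right.initSet('b', 9)
parent.initSet('ab', left.weight + right.weight)

parent.setLeft(left)
parent.setRight(right)
left.setRoot(parent)   # setRoot records the parent node
right.setRoot(parent)

# Each merge prepends one bit to every symbol in the merged subtree.
left.addCode('0')
right.addCode('1')
print(left.code, right.code)   # -> 0 1
```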
228b53836e9569fa901de341d7486f85152e67f9
|
txircd/modules/rfc/cmode_t.py
|
txircd/modules/rfc/cmode_t.py
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
if "topic" not in data:
return None
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
|
Fix non-chanops not being able to query the topic
|
Fix non-chanops not being able to query the topic
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
+ if "topic" not in data:
+ return None
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
|
Fix non-chanops not being able to query the topic
|
## Code Before:
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
## Instruction:
Fix non-chanops not being able to query the topic
## Code After:
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
if "topic" not in data:
return None
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class TopicLockMode(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "TopicLockMode"
core = True
affectedActions = { "commandpermission-TOPIC": 10 }
def channelModes(self):
return [ ("t", ModeType.NoParam, self) ]
def actions(self):
return [ ("modeactioncheck-channel-t-commandpermission-TOPIC", 10, self.channelHasMode) ]
def channelHasMode(self, channel, user, data):
if "t" in channel.modes:
return ""
return None
def apply(self, actionType, channel, param, user, data):
+ if "topic" not in data:
+ return None
if not self.ircd.runActionUntilValue("checkchannellevel", "topic", channel, user):
user.sendMessage(irc.ERR_CHANOPRIVSNEEDED, channel.name, "You do not have access to change the topic on this channel")
return False
return None
topicLockMode = TopicLockMode()
|
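The fix is a plain guard clause: when the TOPIC command carries no new topic (a read-only query), the mode handler declines to rule instead of running the chanop check. A generic sketch of that shape, with illustrative names rather than txircd's actual action data:

```python
def check_topic_permission(data, user_is_chanop):
    # A bare "TOPIC #channel" query carries no "topic" key: express no
    # opinion so other handlers can still answer the query.
    if "topic" not in data:
        return None
    # Only an actual topic change is gated on channel-level access.
    if not user_is_chanop:
        return False
    return None
```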
c833f55999f6fd9029626d1b794c86b2b5b11256
|
post_office/test_settings.py
|
post_office/test_settings.py
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
Use "DjangoTestSuiteRunner" to in Django 1.6.
|
Use "DjangoTestSuiteRunner" to in Django 1.6.
|
Python
|
mit
|
CasherWest/django-post_office,carrerasrodrigo/django-post_office,fapelhanz/django-post_office,RafRaf/django-post_office,ui/django-post_office,jrief/django-post_office,yprez/django-post_office,JostCrow/django-post_office,ui/django-post_office,LeGast00n/django-post_office,CasherWest/django-post_office,ekohl/django-post_office
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
+
+ TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
Use "DjangoTestSuiteRunner" to in Django 1.6.
|
## Code Before:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
## Instruction:
Use "DjangoTestSuiteRunner" to in Django 1.6.
## Code After:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
},
'post_office': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'TIMEOUT': 36000,
'KEY_PREFIX': 'post-office',
}
}
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'post_office',
)
SECRET_KEY = 'a'
ROOT_URLCONF = 'post_office.test_urls'
DEFAULT_FROM_EMAIL = '[email protected]'
+
+ TEST_RUNNER = 'django.test.simple.DjangoTestSuiteRunner'
|
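For context, Django resolves that setting through `django.test.utils.get_runner`. A minimal sketch of picking the runner up programmatically is below; it assumes a configured settings module, and the app label is simply this package's name.

```python
from django.conf import settings
from django.test.utils import get_runner

TestRunner = get_runner(settings)          # resolves the dotted TEST_RUNNER path
failures = TestRunner().run_tests(["post_office"])
```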
316d9557002c54c5dd03f2a740367946b997d06a
|
src/foremast/utils/generate_encoded_user_data.py
|
src/foremast/utils/generate_encoded_user_data.py
|
"""Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
"""Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
Use new relative import within directory
|
fix: Use new relative import within directory
See also: PSOBAT-1197
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Generate base64 encoded User Data."""
import base64
- from ..utils import get_template
+ from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
Use new relative import within directory
|
## Code Before:
"""Generate base64 encoded User Data."""
import base64
from ..utils import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
## Instruction:
Use new relative import within directory
## Code After:
"""Generate base64 encoded User Data."""
import base64
from .get_template import get_template
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
"""Generate base64 encoded User Data."""
import base64
- from ..utils import get_template
? ^^ ^ ^
+ from .get_template import get_template
? ^^ ^^^^^ ^^^
def generate_encoded_user_data(env='dev',
region='us-east-1',
app_name='',
group_name=''):
r"""Generate base64 encoded User Data.
Args:
env (str): Deployment environment, e.g. dev, stage.
region (str): AWS Region, e.g. us-east-1.
app_name (str): Application name, e.g. coreforrest.
group_name (str): Application group name, e.g. core.
Returns:
str: base64 encoded User Data script.
#!/bin/bash
export CLOUD_ENVIRONMENT=dev
export CLOUD_APP=coreforrest
export CLOUD_APP_GROUP=forrest
export CLOUD_STACK=forrest
export EC2_REGION=us-east-1
export CLOUD_DOMAIN=dev.example.com
printenv | grep 'CLOUD\|EC2' | awk '$0="export "$0'>> /etc/gogo/cloud_env
"""
user_data = get_template(template_file='user_data.sh.j2',
env=env,
region=region,
app_name=app_name,
group_name=group_name, )
return base64.b64encode(user_data.encode()).decode()
|
e3a3f55b0db2a5ed323e23dc0d949378a9871a15
|
nex/parsing/general_text_parser.py
|
nex/parsing/general_text_parser.py
|
from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
|
from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
Duplicate small parts to make general text parser independent and simple
|
Duplicate small parts to make general text parser independent and simple
|
Python
|
mit
|
eddiejessup/nex
|
+ from ..rply import ParserGenerator
+
from ..tokens import BuiltToken
+ term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
+ gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
- from .common_parsing import pg as common_pg
-
-
- gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
+ @gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
+ @gen_txt_pg.production('optional_spaces : empty')
+ def optional_spaces(p):
+ return None
+
+
+ @gen_txt_pg.production('empty :')
+ def empty(p):
+ return None
+
+
general_text_parser = gen_txt_pg.build()
|
Duplicate small parts to make general text parser independent and simple
|
## Code Before:
from ..tokens import BuiltToken
from .common_parsing import pg as common_pg
gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
general_text_parser = gen_txt_pg.build()
## Instruction:
Duplicate small parts to make general text parser independent and simple
## Code After:
from ..rply import ParserGenerator
from ..tokens import BuiltToken
term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
@gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
@gen_txt_pg.production('optional_spaces : empty')
def optional_spaces(p):
return None
@gen_txt_pg.production('empty :')
def empty(p):
return None
general_text_parser = gen_txt_pg.build()
|
+ from ..rply import ParserGenerator
+
from ..tokens import BuiltToken
+ term_types = ['SPACE', 'RELAX', 'LEFT_BRACE', 'BALANCED_TEXT_AND_RIGHT_BRACE']
+ gen_txt_pg = ParserGenerator(term_types, cache_id="general_text")
- from .common_parsing import pg as common_pg
-
-
- gen_txt_pg = common_pg.copy_to_extend()
@gen_txt_pg.production('general_text : filler LEFT_BRACE BALANCED_TEXT_AND_RIGHT_BRACE')
def general_text(p):
return BuiltToken(type_='general_text', value=p[2].value,
position_like=p)
@gen_txt_pg.production('filler : optional_spaces')
@gen_txt_pg.production('filler : filler RELAX optional_spaces')
def filler(p):
return None
+ @gen_txt_pg.production('optional_spaces : SPACE optional_spaces')
+ @gen_txt_pg.production('optional_spaces : empty')
+ def optional_spaces(p):
+ return None
+
+
+ @gen_txt_pg.production('empty :')
+ def empty(p):
+ return None
+
+
general_text_parser = gen_txt_pg.build()
|
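The import switch replaces a route back through the utils package's `__init__` with a direct sibling-module import, which sidesteps import-order problems inside the package. The layout sketch below is inferred from the file paths in this row; the import lines only make sense inside that package, and the `__init__.py` description is an assumption.

```python
# Inferred package layout:
#
#   foremast/utils/
#       __init__.py                      # package aggregator
#       get_template.py                  # defines get_template()
#       generate_encoded_user_data.py    # this module
#
# Importing the sibling module directly...
from .get_template import get_template
# ...instead of going back up through the package __init__:
# from ..utils import get_template
```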
ca758b2813ae77b795c4318d7d5566cd47ab0ec7
|
postgres/operations.py
|
postgres/operations.py
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
from .fields.composite import composite_type_created
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
Send a signal after creation of composite field.
|
Send a signal after creation of composite field.
|
Python
|
bsd-3-clause
|
wlanslovenija/django-postgres
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
+ from .fields.composite import composite_type_created
+
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
+ composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
Send a signal after creation of composite field.
|
## Code Before:
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
## Instruction:
Send a signal after creation of composite field.
## Code After:
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
from .fields.composite import composite_type_created
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
+ from .fields.composite import composite_type_created
+
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
+ composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
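One natural consumer of the signal sent above is a receiver that registers the new composite type with psycopg2 as soon as the migration creates it. The sketch below is illustrative only; just the signal import mirrors the module's own, and the receiver body is an assumption.

```python
from django.db import connection
from django.dispatch import receiver
from psycopg2.extras import register_composite

from .fields.composite import composite_type_created

@receiver(composite_type_created)
def on_composite_type_created(sender, db_type, **kwargs):
    # Teach psycopg2 about the freshly created composite type so result
    # rows of that type come back as named tuples.
    with connection.cursor() as cursor:
        register_composite(db_type, cursor.cursor, globally=True)
```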
d240faeec0dea1b9dbefe080b479276ea19d2a0b
|
apps/explorer/tests/test_views.py
|
apps/explorer/tests/test_views.py
|
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set has been submitted yet'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
|
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set matches your query'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
|
Fix empty pixel set list message
|
Fix empty pixel set list message
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
- 'No pixel set has been submitted yet'
+ 'No pixel set matches your query'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
|
Fix empty pixel set list message
|
## Code Before:
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set has been submitted yet'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
## Instruction:
Fix empty pixel set list message
## Code After:
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
'No pixel set matches your query'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
|
from django.core.urlresolvers import reverse
from apps.core.factories import PIXELER_PASSWORD, PixelerFactory
from apps.core.tests import CoreFixturesTestCase
from apps.core.management.commands.make_development_fixtures import (
make_development_fixtures
)
class PixelSetListViewTestCase(CoreFixturesTestCase):
def setUp(self):
self.user = PixelerFactory(
is_active=True,
is_staff=True,
is_superuser=True,
)
self.client.login(
username=self.user.username,
password=PIXELER_PASSWORD,
)
self.url = reverse('explorer:pixelset_list')
def test_renders_pixelset_list_template(self):
response = self.client.get(self.url)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'explorer/pixelset_list.html')
def test_renders_empty_message(self):
response = self.client.get(self.url)
expected = (
'<td colspan="8" class="empty">'
- 'No pixel set has been submitted yet'
+ 'No pixel set matches your query'
'</td>'
)
self.assertContains(response, expected, html=True)
def test_renders_pixelset_list(self):
make_development_fixtures(n_pixel_sets=12)
response = self.client.get(self.url)
self.assertContains(
response,
'<tr class="pixelset">',
count=10
)
|
7af8ee5ca8a036ae2339187b689507989d43aaa6
|
elmo/moon_tracker/utils.py
|
elmo/moon_tracker/utils.py
|
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region)
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region)
)
def user_can_delete_scans(user, moon):
return (
user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or
user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region)
)
|
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region)
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region)
)
def user_can_delete_scans(user, moon):
return (
user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region)
)
|
Update the permission helper functions.
|
Update the permission helper functions.
|
Python
|
mit
|
StephenSwat/eve_lunar_mining_organiser,StephenSwat/eve_lunar_mining_organiser
|
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
- user.has_perm('eve_sde.can_view_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or
- user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or
- user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region)
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
- user.has_perm('eve_sde.can_add_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or
- user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or
- user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region)
)
def user_can_delete_scans(user, moon):
return (
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region)
)
|
Update the permission helper functions.
|
## Code Before:
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region)
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region)
)
def user_can_delete_scans(user, moon):
return (
user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or
user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region)
)
## Instruction:
Update the permission helper functions.
## Code After:
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region)
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region)
)
def user_can_delete_scans(user, moon):
return (
user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or
user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or
user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region)
)
|
def user_can_view_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
- user.has_perm('eve_sde.can_view_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_view_scans', moon.planet.system) or
? ++++
- user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_view_scans', moon.planet.system.constellation) or
? ++++
- user.has_perm('eve_sde.can_view_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_view_scans', moon.planet.system.constellation.region)
? ++++
)
def user_can_add_scans(user, moon):
return (
user_can_delete_scans(user, moon) or
- user.has_perm('eve_sde.can_add_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_add_scans', moon.planet.system) or
? ++++
- user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_add_scans', moon.planet.system.constellation) or
? ++++
- user.has_perm('eve_sde.can_add_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_add_scans', moon.planet.system.constellation.region)
? ++++
)
def user_can_delete_scans(user, moon):
return (
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system) or
+ user.has_perm('eve_sde.sys_can_delete_scans', moon.planet.system) or
? ++++
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation) or
+ user.has_perm('eve_sde.con_can_delete_scans', moon.planet.system.constellation) or
? ++++
- user.has_perm('eve_sde.can_delete_scans', moon.planet.system.constellation.region)
+ user.has_perm('eve_sde.reg_can_delete_scans', moon.planet.system.constellation.region)
? ++++
)
|
6869d5edd706d95c8cadbd1945b29fdd3bfecd6b
|
blaze/datashape/unification.py
|
blaze/datashape/unification.py
|
from numpy import promote_types
from coretypes import Fixed, Range, TypeVar, Record, \
CType, Enum, top, dynamic
class Incommensurable(Exception):
def __init__(self, space, dim):
self.space = space
self.dim = dim
def __str__(self):
return "No way of unifying (%s) (%s)" % (
self.space, self.dim
)
def unify(a, b):
"""
Unification of Datashapes.
"""
ta = type(a)
tb = type(b)
# --
# Unification over BlazeT has two zeros
if ta == top or tb == top:
return top
if ta == dynamic or tb == dynamic:
return top
# --
if (ta,tb) == (Fixed, Fixed):
if a.val == b.val:
return Fixed(a.val)
else:
return Enum(a.val, b.val)
# --
if (ta,tb) == (TypeVar, Fixed):
return TypeVar('x0')
if (ta,tb) == (Fixed, TypeVar):
return TypeVar('x0')
# --
if (ta,tb) == (Record, Record):
c = a.d.items() + b.d.items()
return Record(**dict(c))
# --
if (ta,tb) == (Fixed, Range):
return Range(min(a.val, b.lower), max(a.val, b.upper))
if (ta,tb) == (Range, Fixed):
return Range(min(a.lower, b.val), max(a.val, b.val))
if (ta,tb) == (Range, Range):
return Range(min(a.lower, b.lower), max(b.upper, b.upper))
# --
#if (ta,tb) == (Union, Union):
#return Union(a.parameters + b.parameters)
# --
if (ta,tb) == (CType, CType):
return CType.from_str(promote_types(a.name, b.name).name)
raise Incommensurable(a,b)
|
from numpy import promote_types
from blaze.datashape.coretypes import TypeVar
from blaze.expr.typeinference import infer
class Incommensurable(TypeError):
pass
def unify(sig, concrete=True):
"""
Unification of Datashapes.
"""
resolved = infer(sig)
if all(not isinstance(a, TypeVar) for a in resolved):
return resolved
|
Remove very old type unifier, for robust one
|
Remove very old type unifier, for robust one
|
Python
|
bsd-2-clause
|
seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core,seibert/blaze-core
|
from numpy import promote_types
- from coretypes import Fixed, Range, TypeVar, Record, \
- CType, Enum, top, dynamic
+ from blaze.datashape.coretypes import TypeVar
+ from blaze.expr.typeinference import infer
- class Incommensurable(Exception):
+ class Incommensurable(TypeError):
+ pass
- def __init__(self, space, dim):
- self.space = space
- self.dim = dim
+ def unify(sig, concrete=True):
- def __str__(self):
- return "No way of unifying (%s) (%s)" % (
- self.space, self.dim
- )
-
- def unify(a, b):
"""
Unification of Datashapes.
"""
- ta = type(a)
- tb = type(b)
+ resolved = infer(sig)
+ if all(not isinstance(a, TypeVar) for a in resolved):
+ return resolved
- # --
-
- # Unification over BlazeT has two zeros
-
- if ta == top or tb == top:
- return top
-
- if ta == dynamic or tb == dynamic:
- return top
-
- # --
-
- if (ta,tb) == (Fixed, Fixed):
- if a.val == b.val:
- return Fixed(a.val)
- else:
- return Enum(a.val, b.val)
-
- # --
-
- if (ta,tb) == (TypeVar, Fixed):
- return TypeVar('x0')
-
- if (ta,tb) == (Fixed, TypeVar):
- return TypeVar('x0')
-
- # --
-
- if (ta,tb) == (Record, Record):
- c = a.d.items() + b.d.items()
- return Record(**dict(c))
-
- # --
-
- if (ta,tb) == (Fixed, Range):
- return Range(min(a.val, b.lower), max(a.val, b.upper))
-
- if (ta,tb) == (Range, Fixed):
- return Range(min(a.lower, b.val), max(a.val, b.val))
-
- if (ta,tb) == (Range, Range):
- return Range(min(a.lower, b.lower), max(b.upper, b.upper))
-
- # --
-
- #if (ta,tb) == (Union, Union):
- #return Union(a.parameters + b.parameters)
-
- # --
-
- if (ta,tb) == (CType, CType):
- return CType.from_str(promote_types(a.name, b.name).name)
-
- raise Incommensurable(a,b)
-
|
Remove very old type unifier, for robust one
|
## Code Before:
from numpy import promote_types
from coretypes import Fixed, Range, TypeVar, Record, \
CType, Enum, top, dynamic
class Incommensurable(Exception):
def __init__(self, space, dim):
self.space = space
self.dim = dim
def __str__(self):
return "No way of unifying (%s) (%s)" % (
self.space, self.dim
)
def unify(a, b):
"""
Unification of Datashapes.
"""
ta = type(a)
tb = type(b)
# --
# Unification over BlazeT has two zeros
if ta == top or tb == top:
return top
if ta == dynamic or tb == dynamic:
return top
# --
if (ta,tb) == (Fixed, Fixed):
if a.val == b.val:
return Fixed(a.val)
else:
return Enum(a.val, b.val)
# --
if (ta,tb) == (TypeVar, Fixed):
return TypeVar('x0')
if (ta,tb) == (Fixed, TypeVar):
return TypeVar('x0')
# --
if (ta,tb) == (Record, Record):
c = a.d.items() + b.d.items()
return Record(**dict(c))
# --
if (ta,tb) == (Fixed, Range):
return Range(min(a.val, b.lower), max(a.val, b.upper))
if (ta,tb) == (Range, Fixed):
return Range(min(a.lower, b.val), max(a.val, b.val))
if (ta,tb) == (Range, Range):
return Range(min(a.lower, b.lower), max(b.upper, b.upper))
# --
#if (ta,tb) == (Union, Union):
#return Union(a.parameters + b.parameters)
# --
if (ta,tb) == (CType, CType):
return CType.from_str(promote_types(a.name, b.name).name)
raise Incommensurable(a,b)
## Instruction:
Remove very old type unifier, for robust one
## Code After:
from numpy import promote_types
from blaze.datashape.coretypes import TypeVar
from blaze.expr.typeinference import infer
class Incommensurable(TypeError):
pass
def unify(sig, concrete=True):
"""
Unification of Datashapes.
"""
resolved = infer(sig)
if all(not isinstance(a, TypeVar) for a in resolved):
return resolved
|
from numpy import promote_types
- from coretypes import Fixed, Range, TypeVar, Record, \
- CType, Enum, top, dynamic
+ from blaze.datashape.coretypes import TypeVar
+ from blaze.expr.typeinference import infer
- class Incommensurable(Exception):
? ^^^^^^ ^
+ class Incommensurable(TypeError):
? ++++ ^^ ^
+ pass
- def __init__(self, space, dim):
- self.space = space
- self.dim = dim
+ def unify(sig, concrete=True):
- def __str__(self):
- return "No way of unifying (%s) (%s)" % (
- self.space, self.dim
- )
-
- def unify(a, b):
"""
Unification of Datashapes.
"""
+ resolved = infer(sig)
+ if all(not isinstance(a, TypeVar) for a in resolved):
- ta = type(a)
- tb = type(b)
-
- # --
-
- # Unification over BlazeT has two zeros
-
- if ta == top or tb == top:
- return top
? ^ ^
+ return resolved
? ^^^ ^^^^
-
- if ta == dynamic or tb == dynamic:
- return top
-
- # --
-
- if (ta,tb) == (Fixed, Fixed):
- if a.val == b.val:
- return Fixed(a.val)
- else:
- return Enum(a.val, b.val)
-
- # --
-
- if (ta,tb) == (TypeVar, Fixed):
- return TypeVar('x0')
-
- if (ta,tb) == (Fixed, TypeVar):
- return TypeVar('x0')
-
- # --
-
- if (ta,tb) == (Record, Record):
- c = a.d.items() + b.d.items()
- return Record(**dict(c))
-
- # --
-
- if (ta,tb) == (Fixed, Range):
- return Range(min(a.val, b.lower), max(a.val, b.upper))
-
- if (ta,tb) == (Range, Fixed):
- return Range(min(a.lower, b.val), max(a.val, b.val))
-
- if (ta,tb) == (Range, Range):
- return Range(min(a.lower, b.lower), max(b.upper, b.upper))
-
- # --
-
- #if (ta,tb) == (Union, Union):
- #return Union(a.parameters + b.parameters)
-
- # --
-
- if (ta,tb) == (CType, CType):
- return CType.from_str(promote_types(a.name, b.name).name)
-
- raise Incommensurable(a,b)
|
c0dc0c644fd8912d58deb416955e85259d22618e
|
tests/github_controller/test_request_parsing.py
|
tests/github_controller/test_request_parsing.py
|
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_json(mock_request) == 'json'
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
|
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_json(mock_request) == {'json': 'json'}
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
|
Fix test in request parsing
|
Fix test in request parsing
|
Python
|
mit
|
futuresimple/triggear
|
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
- assert await gh_sut.get_request_json(mock_request) == 'json'
+ assert await gh_sut.get_request_json(mock_request) == {'json': 'json'}
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
|
Fix test in request parsing
|
## Code Before:
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_json(mock_request) == 'json'
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
## Instruction:
Fix test in request parsing
## Code After:
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_json(mock_request) == {'json': 'json'}
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
|
import pytest
from app.controllers.github_controller import GithubController
pytestmark = pytest.mark.asyncio
async def test_get_req_json(gh_sut: GithubController, mock_request):
- assert await gh_sut.get_request_json(mock_request) == 'json'
+ assert await gh_sut.get_request_json(mock_request) == {'json': 'json'}
? + +++++++++
async def test_get_req_event_header(gh_sut: GithubController, mock_request):
assert await gh_sut.get_request_event_header(mock_request) == 'event'
|
3f17f454172d15e9279e00ccc2acfb931bf685f1
|
transmutagen/tests/test_origen.py
|
transmutagen/tests/test_origen.py
|
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
|
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
|
Add a sanity test for the data
|
Add a sanity test for the data
|
Python
|
bsd-3-clause
|
ergs/transmutagen,ergs/transmutagen
|
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
- tape9, time, nuc, phi = datafile.split()[0]
+ tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
- nuclides = data['table_4']['nuclides']
+ nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
- assert set.intersection(*comb) == set()
+ a, b = comb
+ for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
+ array_a, array_b = nuclide[a][common], nuclide[b][common]
+ assert np.allclose(array_a, 0) \
+ or np.allclose(array_b, 0)
+ # or np.allclose(array_a, array_b)
|
Add a sanity test for the data
|
## Code Before:
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
## Instruction:
Add a sanity test for the data
## Code After:
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
a, b = comb
for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
array_a, array_b = nuclide[a][common], nuclide[b][common]
assert np.allclose(array_a, 0) \
or np.allclose(array_b, 0)
# or np.allclose(array_a, array_b)
|
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
- tape9, time, nuc, phi = datafile.split()[0]
? ---
+ tape9, time, nuc, phi = datafile.split()
assert 'table_4' in data
assert 'nuclide' in data['table_4']
- nuclides = data['table_4']['nuclides']
? - -
+ nuclide = data['table_4']['nuclide']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
- assert set.intersection(*comb) == set()
+ a, b = comb
+ for common in set.intersection(set(nuclide[a]), set(nuclide[b])):
+ array_a, array_b = nuclide[a][common], nuclide[b][common]
+ assert np.allclose(array_a, 0) \
+ or np.allclose(array_b, 0)
+ # or np.allclose(array_a, array_b)
|
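Two small idioms do the work in the sanity test above: set intersection to find nuclides that appear in more than one table, and numpy.allclose(x, 0) to treat an all-zero column as effectively absent. A self-contained illustration with made-up numbers (none of this comes from the TAPE9 data) is sketched below:

import numpy as np

table_a = {'U235': np.array([1.0, 0.9]), 'H3': np.array([0.0, 0.0])}
table_b = {'H3': np.array([0.2, 0.3]), 'CS137': np.array([0.1, 0.1])}

for common in set(table_a) & set(table_b):
    # A nuclide may appear in both tables as long as one of the copies is all zeros.
    assert np.allclose(table_a[common], 0) or np.allclose(table_b[common], 0)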
bd5ac74d2aaed956a1db4db2482076470d8c150f
|
google-oauth-userid/app.py
|
google-oauth-userid/app.py
|
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
scope=['profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
|
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
scope=['https://www.googleapis.com/auth/userinfo.profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
|
Update scope to use changed profile
|
Update scope to use changed profile
|
Python
|
mit
|
openshift-cs/OpenShift-Troubleshooting-Templates,openshift-cs/OpenShift-Troubleshooting-Templates
|
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
- scope=['profile']
+ scope=['https://www.googleapis.com/auth/userinfo.profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
|
Update scope to use changed profile
|
## Code Before:
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
scope=['profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
## Instruction:
Update scope to use changed profile
## Code After:
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
scope=['https://www.googleapis.com/auth/userinfo.profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
|
from gevent.wsgi import WSGIServer
from flask import Flask, redirect, url_for
from flask_dance.contrib.google import make_google_blueprint, google
from werkzeug.contrib.fixers import ProxyFix
import os
app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = os.urandom(64)
blueprint = make_google_blueprint(
client_id=os.environ.get('GOOGLE_CLIENT_ID', ''),
client_secret=os.environ.get('GOOGLE_CLIENT_SECRET', ''),
- scope=['profile']
+ scope=['https://www.googleapis.com/auth/userinfo.profile']
)
app.register_blueprint(blueprint, url_prefix='/login')
@app.route('/')
def index():
if not google.authorized:
return redirect(url_for('google.login'))
resp = google.get('/oauth2/v2/userinfo')
assert resp.ok, resp.text
return '<h2>Your Google OAuth ID is: {}</h2>'.format(resp.json()["id"])
if __name__ == "__main__":
http_server = WSGIServer(('0.0.0.0', 8080), app)
print('serving on {}:{}'.format('0.0.0.0', 8080))
http_server.serve_forever()
|
653832ebc7b18599b9aab2f230b190b14e71cd3d
|
models.py
|
models.py
|
from google.appengine.ext import db
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
|
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
|
Clean import modules hey hey.
|
Clean import modules hey hey.
|
Python
|
mpl-2.0
|
BYK/fb2goog,BYK/fb2goog,BYK/fb2goog
|
- from google.appengine.ext import db
-
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
|
Clean import modules hey hey.
|
## Code Before:
from google.appengine.ext import db
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
## Instruction:
Clean import modules hey hey.
## Code After:
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
|
- from google.appengine.ext import db
-
class User(db.Model):
name = db.UserProperty(auto_current_user = True, auto_current_user_add = True)
date = db.DateTimeProperty(auto_now = True, auto_now_add = True)
#token = db.StringProperty()
services = db.StringListProperty()
|
042c11e298dd76c16ef84b2ee1d96d75de6203d4
|
print_traceback.py
|
print_traceback.py
|
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
def custom_function():
raise CustomException('Test to raise custom exception.')
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
for _entry in traceback.format_tb(_traceback):
print(_entry)
|
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
def custom_deep_function():
raise CustomException('Test to raise custom exception.')
def custom_function():
custom_deep_function()
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
print(''.join(str(entry) for entry in traceback.format_tb(_traceback, limit=10)))
|
Print stack trace of exception.
|
Print stack trace of exception.
|
Python
|
mit
|
iandmyhand/python-utils
|
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
+ def custom_deep_function():
+ raise CustomException('Test to raise custom exception.')
+
def custom_function():
- raise CustomException('Test to raise custom exception.')
+ custom_deep_function()
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
+ print(''.join(str(entry) for entry in traceback.format_tb(_traceback, limit=10)))
- for _entry in traceback.format_tb(_traceback):
- print(_entry)
|
Print stack trace of exception.
|
## Code Before:
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
def custom_function():
raise CustomException('Test to raise custom exception.')
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
for _entry in traceback.format_tb(_traceback):
print(_entry)
## Instruction:
Print stack trace of exception.
## Code After:
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
def custom_deep_function():
raise CustomException('Test to raise custom exception.')
def custom_function():
custom_deep_function()
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
print(''.join(str(entry) for entry in traceback.format_tb(_traceback, limit=10)))
|
import sys
import traceback
class CustomException(Exception):
def __init__(self, *args, **kwargs):
super(CustomException, self).__init__(*args, **kwargs)
+ def custom_deep_function():
+ raise CustomException('Test to raise custom exception.')
+
def custom_function():
- raise CustomException('Test to raise custom exception.')
+ custom_deep_function()
try:
custom_function()
except:
_type, _value, _traceback = sys.exc_info()
+ print(''.join(str(entry) for entry in traceback.format_tb(_traceback, limit=10)))
- for _entry in traceback.format_tb(_traceback):
- print(_entry)
|
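As a side note on the traceback commit above, the formatted stack trace of the active exception can also be obtained in a single call; a minimal sketch (plain Python 3, not part of the original change):

import traceback

try:
    raise ValueError('example')
except ValueError:
    # format_exc() returns the formatted stack trace plus the exception line,
    # roughly what joining format_tb(...) with the exception text would give.
    print(traceback.format_exc())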
cdc6390ec88a14b339cb336fcc0d77e747aae99a
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
Revert back to a generator - it's actually slightly faster
|
Revert back to a generator - it's actually slightly faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def sieve(n):
+ return list(primes(n))
+
+
+ def primes(n):
if n < 2:
- return []
+ raise StopIteration
+ yield 2
not_prime = set()
- prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
- prime.append(i)
not_prime.update(range(i*i, n+1, i))
- return prime
+ yield i
|
Revert back to a generator - it's actually slightly faster
|
## Code Before:
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
## Instruction:
Revert back to a generator - it's actually slightly faster
## Code After:
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
def sieve(n):
+ return list(primes(n))
+
+
+ def primes(n):
if n < 2:
- return []
+ raise StopIteration
+ yield 2
not_prime = set()
- prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
- prime.append(i)
not_prime.update(range(i*i, n+1, i))
- return prime
+ yield i
|
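One caveat about the generator version above: under PEP 479 (the default behaviour since Python 3.7), a StopIteration raised inside a generator body is converted into a RuntimeError instead of ending iteration, so the n < 2 guard would crash on current interpreters. A minimal sketch of an equivalent guard that simply returns (the primes2 name is used only to avoid clashing with the record's code):

def primes2(n):
    # Returning ends the generator cleanly; raising StopIteration in the body
    # is turned into RuntimeError by PEP 479 on Python 3.7 and later.
    if n < 2:
        return
    yield 2
    not_prime = set()
    for i in range(3, n + 1, 2):
        if i not in not_prime:
            not_prime.update(range(i * i, n + 1, i))
            yield i

assert list(primes2(10)) == [2, 3, 5, 7]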
20c8d494519b3d54bc3981aebdad18871deef3cb
|
src/sentry/auth/manager.py
|
src/sentry/auth/manager.py
|
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
def get(self, name, **kwargs):
try:
cls = self.__values[name]
except KeyError:
raise ProviderNotRegistered(name)
return cls(name=name, **kwargs)
def exists(self, name):
return name in self.__values
def register(self, name, cls):
self.__values[name] = cls
def unregister(self, name, cls):
if self.__values[name] != cls:
raise ProviderNotRegistered(name)
del self.__values[name]
|
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
def get(self, key, **kwargs):
try:
cls = self.__values[key]
except KeyError:
raise ProviderNotRegistered(key)
return cls(key=key, **kwargs)
def exists(self, key):
return key in self.__values
def register(self, key, cls):
self.__values[key] = cls
def unregister(self, key, cls):
if self.__values[key] != cls:
raise ProviderNotRegistered(key)
del self.__values[key]
|
Revert back to using key
|
Revert back to using key
|
Python
|
bsd-3-clause
|
argonemyth/sentry,gencer/sentry,JamesMura/sentry,vperron/sentry,nicholasserra/sentry,alexm92/sentry,jokey2k/sentry,zenefits/sentry,jokey2k/sentry,zenefits/sentry,Kryz/sentry,llonchj/sentry,llonchj/sentry,daevaorn/sentry,zenefits/sentry,ewdurbin/sentry,TedaLIEz/sentry,boneyao/sentry,nicholasserra/sentry,felixbuenemann/sentry,BuildingLink/sentry,vperron/sentry,daevaorn/sentry,mitsuhiko/sentry,looker/sentry,JTCunning/sentry,kevinlondon/sentry,looker/sentry,wujuguang/sentry,looker/sentry,ifduyue/sentry,drcapulet/sentry,mvaled/sentry,fuziontech/sentry,fotinakis/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,alexm92/sentry,fotinakis/sentry,ifduyue/sentry,JamesMura/sentry,fotinakis/sentry,kevinastone/sentry,1tush/sentry,jean/sentry,daevaorn/sentry,gencer/sentry,ngonzalvez/sentry,kevinastone/sentry,songyi199111/sentry,alexm92/sentry,hongliang5623/sentry,hongliang5623/sentry,korealerts1/sentry,BuildingLink/sentry,pauloschilling/sentry,mvaled/sentry,fuziontech/sentry,mvaled/sentry,ifduyue/sentry,BayanGroup/sentry,Natim/sentry,ifduyue/sentry,JTCunning/sentry,zenefits/sentry,songyi199111/sentry,wong2/sentry,gencer/sentry,1tush/sentry,beeftornado/sentry,JTCunning/sentry,JackDanger/sentry,imankulov/sentry,jean/sentry,daevaorn/sentry,gg7/sentry,boneyao/sentry,nicholasserra/sentry,jean/sentry,TedaLIEz/sentry,argonemyth/sentry,korealerts1/sentry,wujuguang/sentry,wong2/sentry,Natim/sentry,mvaled/sentry,felixbuenemann/sentry,gencer/sentry,BayanGroup/sentry,jokey2k/sentry,pauloschilling/sentry,gg7/sentry,TedaLIEz/sentry,mitsuhiko/sentry,imankulov/sentry,ewdurbin/sentry,BayanGroup/sentry,beeftornado/sentry,JackDanger/sentry,imankulov/sentry,ifduyue/sentry,Kryz/sentry,JamesMura/sentry,1tush/sentry,kevinastone/sentry,fotinakis/sentry,songyi199111/sentry,BuildingLink/sentry,Natim/sentry,JamesMura/sentry,ngonzalvez/sentry,drcapulet/sentry,drcapulet/sentry,felixbuenemann/sentry,argonemyth/sentry,pauloschilling/sentry,llonchj/sentry,mvaled/sentry,jean/sentry,hongliang5623/sentry,beeftornado/sentry,jean/sentry,mvaled/sentry,JackDanger/sentry,ewdurbin/sentry,boneyao/sentry,wong2/sentry,looker/sentry,Kryz/sentry,gg7/sentry,vperron/sentry,looker/sentry,wujuguang/sentry,JamesMura/sentry,gencer/sentry,BuildingLink/sentry,kevinlondon/sentry,korealerts1/sentry,fuziontech/sentry,kevinlondon/sentry
|
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
- def get(self, name, **kwargs):
+ def get(self, key, **kwargs):
try:
- cls = self.__values[name]
+ cls = self.__values[key]
except KeyError:
- raise ProviderNotRegistered(name)
+ raise ProviderNotRegistered(key)
- return cls(name=name, **kwargs)
+ return cls(key=key, **kwargs)
- def exists(self, name):
+ def exists(self, key):
- return name in self.__values
+ return key in self.__values
- def register(self, name, cls):
+ def register(self, key, cls):
- self.__values[name] = cls
+ self.__values[key] = cls
- def unregister(self, name, cls):
+ def unregister(self, key, cls):
- if self.__values[name] != cls:
+ if self.__values[key] != cls:
- raise ProviderNotRegistered(name)
+ raise ProviderNotRegistered(key)
- del self.__values[name]
+ del self.__values[key]
|
Revert back to using key
|
## Code Before:
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
def get(self, name, **kwargs):
try:
cls = self.__values[name]
except KeyError:
raise ProviderNotRegistered(name)
return cls(name=name, **kwargs)
def exists(self, name):
return name in self.__values
def register(self, name, cls):
self.__values[name] = cls
def unregister(self, name, cls):
if self.__values[name] != cls:
raise ProviderNotRegistered(name)
del self.__values[name]
## Instruction:
Revert back to using key
## Code After:
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
def get(self, key, **kwargs):
try:
cls = self.__values[key]
except KeyError:
raise ProviderNotRegistered(key)
return cls(key=key, **kwargs)
def exists(self, key):
return key in self.__values
def register(self, key, cls):
self.__values[key] = cls
def unregister(self, key, cls):
if self.__values[key] != cls:
raise ProviderNotRegistered(key)
del self.__values[key]
|
from __future__ import absolute_import, print_function
__all__ = ['ProviderManager']
from .exceptions import ProviderNotRegistered
# Ideally this and PluginManager abstracted from the same base, but
# InstanceManager has become convulated and wasteful
class ProviderManager(object):
def __init__(self):
self.__values = {}
def __iter__(self):
return self.__values.iteritems()
- def get(self, name, **kwargs):
? ^^^
+ def get(self, key, **kwargs):
? ^ +
try:
- cls = self.__values[name]
? ^^^
+ cls = self.__values[key]
? ^ +
except KeyError:
- raise ProviderNotRegistered(name)
? ^^^
+ raise ProviderNotRegistered(key)
? ^ +
- return cls(name=name, **kwargs)
? ^^^ ^^^
+ return cls(key=key, **kwargs)
? ^ + ^ +
- def exists(self, name):
? ^^^
+ def exists(self, key):
? ^ +
- return name in self.__values
? ^^^
+ return key in self.__values
? ^ +
- def register(self, name, cls):
? ^^^
+ def register(self, key, cls):
? ^ +
- self.__values[name] = cls
? ^^^
+ self.__values[key] = cls
? ^ +
- def unregister(self, name, cls):
? ^^^
+ def unregister(self, key, cls):
? ^ +
- if self.__values[name] != cls:
? ^^^
+ if self.__values[key] != cls:
? ^ +
- raise ProviderNotRegistered(name)
? ^^^
+ raise ProviderNotRegistered(key)
? ^ +
- del self.__values[name]
? ^^^
+ del self.__values[key]
? ^ +
|
0ba9fa847a8b605363b298ecad40cb2fc5870cbb
|
build_modules.py
|
build_modules.py
|
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
if __name__ == "__main__":
build_modules()
|
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
from voxel_native.scripts.common import is_macos, is_windows, is_linux
if is_windows():
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
elif is_macos() or is_linux():
shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
|
Update build script to work correctly on macOS and linux.
|
Update build script to work correctly on macOS and linux.
|
Python
|
mit
|
treamology/panda3d-voxels,treamology/panda3d-voxels,treamology/panda3d-voxels
|
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
+ from voxel_native.scripts.common import is_macos, is_windows, is_linux
+ if is_windows():
- shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
+ shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
-
+ elif is_macos() or is_linux():
+ shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
|
Update build script to work correctly on macOS and linux.
|
## Code Before:
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
if __name__ == "__main__":
build_modules()
## Instruction:
Update build script to work correctly on macOS and linux.
## Code After:
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
from voxel_native.scripts.common import is_macos, is_windows, is_linux
if is_windows():
shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
elif is_macos() or is_linux():
shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
|
import os, sys, subprocess, shutil
def check_for_module_builder():
if os.path.exists("voxel_native/scripts/"):
return
print("Downloading P3DModuleBuilder...")
cmd = [sys.executable, "-B", "voxel_native/download_P3DModuleBuilder.py"]
try:
output = subprocess.check_output(cmd, stderr=sys.stderr)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Couldn't download P3DModuleBuilder.")
sys.exit(-1)
def build_modules():
print("Building native modules...")
check_for_module_builder()
cmd = [sys.executable, "-B", "-m", "voxel_native.build"]
try:
output = subprocess.run(cmd, stderr=sys.stderr, stdout=sys.stdout, check=True)
except subprocess.CalledProcessError as errorMsg:
print(errorMsg)
print("Error building the native modules.")
sys.exit(-1)
+ from voxel_native.scripts.common import is_macos, is_windows, is_linux
+ if is_windows():
- shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
+ shutil.move("voxel_native/voxel_native.pyd", "voxel/voxel_native.pyd")
? ++++
-
+ elif is_macos() or is_linux():
+ shutil.move("voxel_native/voxel_native.so", "voxel/voxel_native.so")
if __name__ == "__main__":
build_modules()
|
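The build script above imports is_windows, is_macos and is_linux from voxel_native.scripts.common, which is not shown in this record. A plausible sketch of such helpers based on sys.platform is given below purely as an assumption about what that module provides:

import sys

def is_windows():
    # 'win32' on Windows regardless of bitness.
    return sys.platform.startswith('win')

def is_macos():
    return sys.platform == 'darwin'

def is_linux():
    # 'linux' on Python 3, 'linux2' on older Python 2 builds.
    return sys.platform.startswith('linux')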
2afe09bcbcc728e98ec8da39b68ea65f4c270fdb
|
html5lib/trie/_base.py
|
html5lib/trie/_base.py
|
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
keys = super().keys()
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
|
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
keys = super(Trie, self).keys()
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
|
Make this in practice unreachable code work on Py2
|
Make this in practice unreachable code work on Py2
|
Python
|
mit
|
html5lib/html5lib-python,html5lib/html5lib-python,html5lib/html5lib-python
|
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
- keys = super().keys()
+ keys = super(Trie, self).keys()
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
|
Make this in practice unreachable code work on Py2
|
## Code Before:
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
keys = super().keys()
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
## Instruction:
Make this in practice unreachable code work on Py2
## Code After:
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
keys = super(Trie, self).keys()
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
|
from __future__ import absolute_import, division, unicode_literals
from collections import Mapping
class Trie(Mapping):
"""Abstract base class for tries"""
def keys(self, prefix=None):
- keys = super().keys()
+ keys = super(Trie, self).keys()
? ++++++++++
if prefix is None:
return set(keys)
# Python 2.6: no set comprehensions
return set([x for x in keys if x.startswith(prefix)])
def has_keys_with_prefix(self, prefix):
for key in self.keys():
if key.startswith(prefix):
return True
return False
def longest_prefix(self, prefix):
if prefix in self:
return prefix
for i in range(1, len(prefix) + 1):
if prefix[:-i] in self:
return prefix[:-i]
raise KeyError(prefix)
def longest_prefix_item(self, prefix):
lprefix = self.longest_prefix(prefix)
return (lprefix, self[lprefix])
|
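For context on the html5lib record above: the zero-argument form of super() exists only on Python 3, so super(Trie, self) is the spelling that also works on Python 2. A tiny generic reminder of the pattern (generic classes, not html5lib's):

class Base(object):
    def keys(self):
        return ['a']

class Child(Base):
    def keys(self):
        # super(Child, self) works on both Python 2 and 3;
        # a bare super() would raise TypeError on Python 2.
        return set(super(Child, self).keys())

assert Child().keys() == {'a'}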
622b81296b292035b970891cd259eaac113d20c1
|
apps/accounts/conf.py
|
apps/accounts/conf.py
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
Change internal name of UNCCD role back to previous correct value
|
Change internal name of UNCCD role back to previous correct value
|
Python
|
apache-2.0
|
CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat,CDE-UNIBE/qcat
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
- UNCCD_ROLE_NAME = 'UNCCD national focal point'
+ UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
Change internal name of UNCCD role back to previous correct value
|
## Code Before:
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD national focal point'
## Instruction:
Change internal name of UNCCD role back to previous correct value
## Code After:
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
UNCCD_ROLE_NAME = 'UNCCD Focal Point'
|
from django.conf import settings # noqa
from appconf import AppConf
class AccountConf(AppConf):
"""
Custom settings for the account module. Mainly settings required for
the login on the remote system.
"""
PID = 3
LOGIN_TYPE = 'login'
LOGIN_SUCCESS_URL = 'home'
ENFORCE_LOGIN_TIMEOUT = 300 # 5 Minutes
ENFORCE_LOGIN_COOKIE_NAME = 'login_expiry'
ENFORCE_LOGIN_SALT = settings.AUTH_API_USER
ENFORCE_LOGIN_NAME = 'force_login_check'
# The name of the UNCCD role as provided by the remote system.
- UNCCD_ROLE_NAME = 'UNCCD national focal point'
? ^^^^^^^^^^ ^
+ UNCCD_ROLE_NAME = 'UNCCD Focal Point'
? ^ ^
|
d7ea84800b89255137300b8e8d83b4b6abfc30b2
|
src/oscar/apps/voucher/receivers.py
|
src/oscar/apps/voucher/receivers.py
|
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
voucher.save()
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
voucher.save()
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
|
from django.db.models import F
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
voucher.__class__._default_manager.filter(pk=voucher.pk).update(
num_basket_additions=F('num_basket_additions') + 1,
)
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
voucher.__class__._default_manager.filter(pk=voucher.pk).update(
num_basket_additions=F('num_basket_additions') - 1,
)
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
|
Fix race condition when tracking num_basket_additions on a voucher
|
Fix race condition when tracking num_basket_additions on a voucher
|
Python
|
bsd-3-clause
|
jmt4/django-oscar,mexeniz/django-oscar,michaelkuty/django-oscar,pasqualguerrero/django-oscar,binarydud/django-oscar,saadatqadri/django-oscar,jmt4/django-oscar,michaelkuty/django-oscar,nickpack/django-oscar,pasqualguerrero/django-oscar,okfish/django-oscar,sasha0/django-oscar,okfish/django-oscar,sasha0/django-oscar,bnprk/django-oscar,anentropic/django-oscar,solarissmoke/django-oscar,amirrpp/django-oscar,solarissmoke/django-oscar,dongguangming/django-oscar,amirrpp/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,saadatqadri/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,sasha0/django-oscar,jmt4/django-oscar,anentropic/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,lijoantony/django-oscar,Bogh/django-oscar,vovanbo/django-oscar,spartonia/django-oscar,eddiep1101/django-oscar,monikasulik/django-oscar,taedori81/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,jmt4/django-oscar,jlmadurga/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,lijoantony/django-oscar,pdonadeo/django-oscar,pdonadeo/django-oscar,binarydud/django-oscar,spartonia/django-oscar,michaelkuty/django-oscar,bnprk/django-oscar,binarydud/django-oscar,MatthewWilkes/django-oscar,monikasulik/django-oscar,WillisXChen/django-oscar,WadeYuChen/django-oscar,WadeYuChen/django-oscar,thechampanurag/django-oscar,itbabu/django-oscar,QLGu/django-oscar,sonofatailor/django-oscar,anentropic/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,nfletton/django-oscar,jmt4/django-oscar,jlmadurga/django-oscar,MatthewWilkes/django-oscar,john-parton/django-oscar,john-parton/django-oscar,kapari/django-oscar,kapari/django-oscar,itbabu/django-oscar,Jannes123/django-oscar,okfish/django-oscar,ka7eh/django-oscar,rocopartners/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,Jannes123/django-oscar,saadatqadri/django-oscar,bnprk/django-oscar,faratro/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,mexeniz/django-oscar,Bogh/django-oscar,binarydud/django-oscar,itbabu/django-oscar,taedori81/django-oscar,nfletton/django-oscar,bschuon/django-oscar,rocopartners/django-oscar,spartonia/django-oscar,faratro/django-oscar,bschuon/django-oscar,rocopartners/django-oscar,dongguangming/django-oscar,monikasulik/django-oscar,nickpack/django-oscar,nickpack/django-oscar,bschuon/django-oscar,bnprk/django-oscar,saadatqadri/django-oscar,nfletton/django-oscar,nickpack/django-oscar,nfletton/django-oscar,QLGu/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,faratro/django-oscar,thechampanurag/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,QLGu/django-oscar,monikasulik/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,WadeYuChen/django-oscar,ka7eh/django-oscar,taedori81/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,thechampanurag/django-oscar,pdonadeo/django-oscar,john-parton/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,kapari/django-oscar,rocopartners/django-oscar,amirrpp/django-oscar,mexeniz/django-oscar,vovanbo/django-oscar,vovanbo/django-oscar,WillisXChen/django-oscar,ka7eh/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,spartonia/django-oscar,okfish/django-oscar
|
+ from django.db.models import F
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
- voucher.save()
+ voucher.__class__._default_manager.filter(pk=voucher.pk).update(
+ num_basket_additions=F('num_basket_additions') + 1,
+ )
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
- voucher.save()
+ voucher.__class__._default_manager.filter(pk=voucher.pk).update(
+ num_basket_additions=F('num_basket_additions') - 1,
+ )
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
|
Fix race condition when tracking num_basket_additions on a voucher
|
## Code Before:
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
voucher.save()
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
voucher.save()
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
## Instruction:
Fix race condition when tracking num_basket_additions on a voucher
## Code After:
from django.db.models import F
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
voucher.__class__._default_manager.filter(pk=voucher.pk).update(
num_basket_additions=F('num_basket_additions') + 1,
)
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
voucher.__class__._default_manager.filter(pk=voucher.pk).update(
num_basket_additions=F('num_basket_additions') - 1,
)
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
|
+ from django.db.models import F
from oscar.apps.basket import signals
def track_voucher_addition(basket, voucher, **kwargs):
voucher.num_basket_additions += 1
- voucher.save()
+ voucher.__class__._default_manager.filter(pk=voucher.pk).update(
+ num_basket_additions=F('num_basket_additions') + 1,
+ )
def track_voucher_removal(basket, voucher, **kwargs):
voucher.num_basket_additions -= 1
- voucher.save()
+ voucher.__class__._default_manager.filter(pk=voucher.pk).update(
+ num_basket_additions=F('num_basket_additions') - 1,
+ )
signals.voucher_addition.connect(track_voucher_addition)
signals.voucher_removal.connect(track_voucher_removal)
|
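The Oscar change above closes the race by pushing the arithmetic into a single SQL UPDATE with an F() expression instead of saving a value that was read into Python. A minimal, self-contained sketch of the same pattern — the Counter model and its field are invented for illustration, not taken from Oscar:

from django.db import models
from django.db.models import F


class Counter(models.Model):
    # Hypothetical model, used only to demonstrate the atomic-update pattern.
    hits = models.PositiveIntegerField(default=0)

    class Meta:
        app_label = 'demo'


def bump(counter_id):
    # Issues: UPDATE demo_counter SET hits = hits + 1 WHERE id = %s
    # The addition happens inside the database, so two concurrent calls
    # cannot overwrite each other the way read-modify-save with .save() can.
    Counter.objects.filter(pk=counter_id).update(hits=F('hits') + 1)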
cec10f55b280311161033ad3c9457b20822f7353
|
geotrek/outdoor/migrations/0003_auto_20201214_1408.py
|
geotrek/outdoor/migrations/0003_auto_20201214_1408.py
|
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
migrations.AlterField(
model_name='site',
name='geom',
field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
|
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
migrations.SeparateDatabaseAndState(
database_operations=[
migrations.RunSQL('ALTER TABLE "outdoor_site" ALTER COLUMN "geom" TYPE geometry(GeometryCollection,2154) USING ST_ForceCollection(geom);')
],
state_operations=[
migrations.AlterField(
model_name='site',
name='geom',
field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
),
]
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
|
Fix migration Site geom to GeometryCollection
|
Fix migration Site geom to GeometryCollection
|
Python
|
bsd-2-clause
|
GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek
|
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
+ migrations.SeparateDatabaseAndState(
+ database_operations=[
+ migrations.RunSQL('ALTER TABLE "outdoor_site" ALTER COLUMN "geom" TYPE geometry(GeometryCollection,2154) USING ST_ForceCollection(geom);')
+ ],
+ state_operations=[
- migrations.AlterField(
+ migrations.AlterField(
- model_name='site',
+ model_name='site',
- name='geom',
+ name='geom',
- field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
+ field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
+ ),
+ ]
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
|
Fix migration Site geom to GeometryCollection
|
## Code Before:
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
migrations.AlterField(
model_name='site',
name='geom',
field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
## Instruction:
Fix migration Site geom to GeometryCollection
## Code After:
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
migrations.SeparateDatabaseAndState(
database_operations=[
migrations.RunSQL('ALTER TABLE "outdoor_site" ALTER COLUMN "geom" TYPE geometry(GeometryCollection,2154) USING ST_ForceCollection(geom);')
],
state_operations=[
migrations.AlterField(
model_name='site',
name='geom',
field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
),
]
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
|
from django.conf import settings
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('outdoor', '0002_practice_sitepractice'),
]
operations = [
migrations.AlterModelOptions(
name='site',
options={'ordering': ('name',), 'verbose_name': 'Outdoor site', 'verbose_name_plural': 'Outdoor sites'},
),
+ migrations.SeparateDatabaseAndState(
+ database_operations=[
+ migrations.RunSQL('ALTER TABLE "outdoor_site" ALTER COLUMN "geom" TYPE geometry(GeometryCollection,2154) USING ST_ForceCollection(geom);')
+ ],
+ state_operations=[
- migrations.AlterField(
+ migrations.AlterField(
? ++++++++
- model_name='site',
+ model_name='site',
? ++++++++
- name='geom',
+ name='geom',
? ++++++++
- field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
+ field=django.contrib.gis.db.models.fields.GeometryCollectionField(srid=settings.SRID, verbose_name='Location'),
? ++++++++
+ ),
+ ]
),
migrations.AlterField(
model_name='sitepractice',
name='site',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_practices', to='outdoor.site', verbose_name='Outdoor site'),
),
]
|
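The Geotrek migration above works because SeparateDatabaseAndState lets the hand-written ALTER TABLE ... USING ST_ForceCollection(geom) run against the database while Django's migration state only records a plain AlterField. A stripped-down sketch of the same split, with an invented app, table and column standing in for the real ones:

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('demo', '0001_initial'),  # placeholder dependency
    ]

    operations = [
        migrations.SeparateDatabaseAndState(
            # Runs against the database: a conversion AlterField cannot express.
            database_operations=[
                migrations.RunSQL(
                    'ALTER TABLE "demo_item" ALTER COLUMN "size" '
                    'TYPE integer USING size::integer;',
                    reverse_sql=migrations.RunSQL.noop,
                ),
            ],
            # Recorded in Django's model state so later migrations line up.
            state_operations=[
                migrations.AlterField(
                    model_name='item',
                    name='size',
                    field=models.IntegerField(),
                ),
            ],
        ),
    ]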
f4f5852944d1fd1b9e96a70cb4496ee6e1e66dc0
|
genome_designer/main/celery_util.py
|
genome_designer/main/celery_util.py
|
from errno import errorcode
from celery.task.control import inspect
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
|
from errno import errorcode
from celery.task.control import inspect
from django.conf import settings
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
if settings.BROKER_BACKEND == 'memory':
# We are testing with in-memory celery. Celery is effectively running.
return {}
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
|
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
|
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
|
Python
|
mit
|
churchlab/millstone,churchlab/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone
|
from errno import errorcode
from celery.task.control import inspect
+ from django.conf import settings
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
+ if settings.BROKER_BACKEND == 'memory':
+ # We are testing with in-memory celery. Celery is effectively running.
+ return {}
+
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
|
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
|
## Code Before:
from errno import errorcode
from celery.task.control import inspect
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
## Instruction:
Fix tests: Allow for celery not to be running when doing in-memory celery for tests.
## Code After:
from errno import errorcode
from celery.task.control import inspect
from django.conf import settings
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
if settings.BROKER_BACKEND == 'memory':
# We are testing with in-memory celery. Celery is effectively running.
return {}
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
|
from errno import errorcode
from celery.task.control import inspect
+ from django.conf import settings
CELERY_ERROR_KEY = 'ERROR'
def get_celery_worker_status():
"""Checks whether celery is running and reports the error if not.
Source: http://stackoverflow.com/questions/8506914/detect-whether-celery-is-available-running
"""
+ if settings.BROKER_BACKEND == 'memory':
+ # We are testing with in-memory celery. Celery is effectively running.
+ return {}
+
try:
insp = inspect()
d = insp.stats()
if not d:
d = { CELERY_ERROR_KEY: 'No running Celery workers were found.' }
except IOError as e:
msg = "Error connecting to the backend: " + str(e)
if len(e.args) > 0 and errorcode.get(e.args[0]) == 'ECONNREFUSED':
msg += ' Check that the RabbitMQ server is running.'
d = { CELERY_ERROR_KEY: msg }
except ImportError as e:
d = { CELERY_ERROR_KEY: str(e)}
return d
|
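The early return added above reflects a quirk of testing with Celery's in-memory broker: tasks run eagerly in-process, so inspect().stats() finds no workers even though everything works. One way to package that check as a helper — the BROKER_BACKEND setting name and the legacy celery.task.control import are taken from the snippet above and may differ in newer Celery versions:

from django.conf import settings


def celery_is_effectively_running():
    """Treat the in-memory test broker as running; otherwise probe real workers."""
    if getattr(settings, 'BROKER_BACKEND', None) == 'memory':
        # Eager, in-process execution: there are no workers to inspect.
        return True
    from celery.task.control import inspect  # legacy import path, per the code above
    return bool(inspect().stats())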
8774517714c8c8a7f7a2be9316a23497adfa9f59
|
pi_gpio/urls.py
|
pi_gpio/urls.py
|
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
import RPi.GPIO as GPIO
def event_callback(pin):
socketio.emit('pin:event', {"message":"woohoo!"})
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
GPIO.add_event_detect(23, GPIO.RISING, callback=event_callback)
return render_template('index.html')
|
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
from events import PinEventManager
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
PinEventManager()
return render_template('index.html')
|
Call event manager in index route
|
Call event manager in index route
|
Python
|
mit
|
projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server
|
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
+ from events import PinEventManager
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
- import RPi.GPIO as GPIO
-
-
- def event_callback(pin):
- socketio.emit('pin:event', {"message":"woohoo!"})
-
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
- GPIO.add_event_detect(23, GPIO.RISING, callback=event_callback)
+ PinEventManager()
return render_template('index.html')
|
Call event manager in index route
|
## Code Before:
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
import RPi.GPIO as GPIO
def event_callback(pin):
socketio.emit('pin:event', {"message":"woohoo!"})
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
GPIO.add_event_detect(23, GPIO.RISING, callback=event_callback)
return render_template('index.html')
## Instruction:
Call event manager in index route
## Code After:
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
from events import PinEventManager
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
PinEventManager()
return render_template('index.html')
|
from pi_gpio import app, socketio
from flask.ext import restful
from flask import render_template
from handlers import PinList, PinDetail
+ from events import PinEventManager
api = restful.Api(app)
api.add_resource(PinList, '/api/v1/pin')
api.add_resource(PinDetail, '/api/v1/pin/<string:pin_num>')
- import RPi.GPIO as GPIO
-
-
- def event_callback(pin):
- socketio.emit('pin:event', {"message":"woohoo!"})
-
@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def index(path):
- GPIO.add_event_detect(23, GPIO.RISING, callback=event_callback)
+ PinEventManager()
return render_template('index.html')
|
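The route above now just instantiates PinEventManager, but the events module itself is not part of this record, so the following is only a guess at what such a manager might look like; the pin number, edge type and payload are illustrative:

import RPi.GPIO as GPIO


class PinEventManager(object):
    """Registers GPIO edge callbacks once, keeping the Flask routes free of wiring."""

    def __init__(self, pins=(23,), socketio=None):
        self.socketio = socketio
        GPIO.setmode(GPIO.BCM)
        for pin in pins:
            GPIO.setup(pin, GPIO.IN)
            GPIO.add_event_detect(pin, GPIO.RISING, callback=self._emit)

    def _emit(self, pin):
        # Forward the hardware event to any connected clients.
        if self.socketio is not None:
            self.socketio.emit('pin:event', {'pin': pin})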
9f64d5e2f9447233df8d3b841c519196c3213e05
|
pyflation/analysis/tests/test_deltaprel.py
|
pyflation/analysis/tests/test_deltaprel.py
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
Add test for scalar values.
|
Add test for scalar values.
|
Python
|
bsd-3-clause
|
ihuston/pyflation,ihuston/pyflation
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
- def test_calc(self):
+ def test_scalar(self):
- """Test results of calculation."""
+ """Test results of 1x1x1 calculation."""
- arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
+ arr = deltaprel.soundspeeds(3, 2, 0.5)
+ assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
Add test for scalar values.
|
## Code Before:
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_calc(self):
"""Test results of calculation."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
## Instruction:
Add test for scalar values.
## Code After:
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
- def test_calc(self):
? ^
+ def test_scalar(self):
? + ^^
- """Test results of calculation."""
+ """Test results of 1x1x1 calculation."""
? ++++++
- arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
+ arr = deltaprel.soundspeeds(3, 2, 0.5)
+ assert_(arr == 3)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
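The new scalar test above leans on a general NumPy property: a function written only with elementwise arithmetic accepts plain Python numbers and broadcastable arrays alike. A small illustration of that property — the formula is invented and is not the deltaprel soundspeed expression:

import numpy as np


def toy_speed(Vphi, phidot, H):
    # Pure elementwise arithmetic broadcasts over scalars and arrays alike.
    return Vphi / (phidot * H ** 2)


assert toy_speed(3, 2, 0.5) == 6.0             # scalar in, scalar out
arr = toy_speed(np.arange(24).reshape(4, 3, 2),
                np.ones((4, 3, 2)),
                np.ones((4, 1, 2)))             # (4,1,2) broadcasts against (4,3,2)
assert arr.shape == (4, 3, 2)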
65d8715705e07dc7f091e2da47a7ada923c6cfbb
|
release.py
|
release.py
|
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
Remove lingering reference to linked changelog.
|
Remove lingering reference to linked changelog.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
- os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
Remove lingering reference to linked changelog.
|
## Code Before:
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
## Instruction:
Remove lingering reference to linked changelog.
## Code After:
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
- os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
1265221d0300ff214cef12dc244f745c7f2ec316
|
tests/core/ast_transforms/test_basic_sanity.py
|
tests/core/ast_transforms/test_basic_sanity.py
|
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
if __name__ == '__main__':
import pytest
pytest.main()
|
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
def zero(x):
return 0
def child_faker(x):
return 42
child_faker.__name__ = 'child'
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
def test_child_transform_square_to_zero():
original = parent(2)
assert original == 16
result = parent(2, child=zero)
assert result == 0
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
def test_child_transform_with_faked_child():
# maliciously faking a function's name should not affect the result
# this can also happen when using decorators
assert child_faker.__name__ == child.__name__
original = parent(1)
assert original == 4
result = parent(1, child=child_faker)
assert result == 42
final = parent(1)
assert final == 4
if __name__ == '__main__':
import pytest
pytest.main()
|
Add a failing, coverage-increasing test
|
Add a failing, coverage-increasing test
|
Python
|
mit
|
dwillmer/fastats,fastats/fastats
|
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
+
+
+ def zero(x):
+ return 0
+
+
+ def child_faker(x):
+ return 42
+
+
+ child_faker.__name__ = 'child'
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
+ def test_child_transform_square_to_zero():
+ original = parent(2)
+ assert original == 16
+
+ result = parent(2, child=zero)
+ assert result == 0
+
+ final_two = parent(2)
+ assert final_two == 16
+
+ final = parent(3)
+ assert final == 36
+
+
+ def test_child_transform_with_faked_child():
+ # maliciously faking a function's name should not affect the result
+ # this can also happen when using decorators
+ assert child_faker.__name__ == child.__name__
+
+ original = parent(1)
+ assert original == 4
+
+ result = parent(1, child=child_faker)
+ assert result == 42
+
+ final = parent(1)
+ assert final == 4
+
+
if __name__ == '__main__':
import pytest
pytest.main()
|
Add a failing, coverage-increasing test
|
## Code Before:
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
if __name__ == '__main__':
import pytest
pytest.main()
## Instruction:
Add a failing, coverage-increasing test
## Code After:
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
def zero(x):
return 0
def child_faker(x):
return 42
child_faker.__name__ = 'child'
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
def test_child_transform_square_to_zero():
original = parent(2)
assert original == 16
result = parent(2, child=zero)
assert result == 0
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
def test_child_transform_with_faked_child():
# maliciously faking a function's name should not affect the result
# this can also happen when using decorators
assert child_faker.__name__ == child.__name__
original = parent(1)
assert original == 4
result = parent(1, child=child_faker)
assert result == 42
final = parent(1)
assert final == 4
if __name__ == '__main__':
import pytest
pytest.main()
|
from fastats.core.decorator import fs
from tests import cube
def child(x):
return x * x
@fs
def parent(a):
b = 2 * a
result = child(b)
return result
def quad(x):
return cube(x) * x
+
+
+ def zero(x):
+ return 0
+
+
+ def child_faker(x):
+ return 42
+
+
+ child_faker.__name__ = 'child'
def test_child_transform_square_to_cube_execution():
original = parent(2)
assert original == 16
result = parent(2, child=cube)
assert result == 64
final = parent(2)
assert final == 16
def test_child_transform_square_to_quadruple():
original = parent(2)
assert original == 16
result = parent(2, child=quad)
assert result == 256
final_two = parent(2)
assert final_two == 16
final = parent(3)
assert final == 36
+ def test_child_transform_square_to_zero():
+ original = parent(2)
+ assert original == 16
+
+ result = parent(2, child=zero)
+ assert result == 0
+
+ final_two = parent(2)
+ assert final_two == 16
+
+ final = parent(3)
+ assert final == 36
+
+
+ def test_child_transform_with_faked_child():
+ # maliciously faking a function's name should not affect the result
+ # this can also happen when using decorators
+ assert child_faker.__name__ == child.__name__
+
+ original = parent(1)
+ assert original == 4
+
+ result = parent(1, child=child_faker)
+ assert result == 42
+
+ final = parent(1)
+ assert final == 4
+
+
if __name__ == '__main__':
import pytest
pytest.main()
|
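The faked-child test above works because a function's __name__ is just an ordinary, writable attribute, and decorators routinely produce wrappers that share the wrapped function's name via functools.wraps. A self-contained illustration of why matching on __name__ alone is fragile:

import functools


def child(x):
    return x * x


def impostor(x):
    return 42


impostor.__name__ = 'child'          # a name can simply be reassigned


def logged(func):
    @functools.wraps(func)           # copies __name__ onto the wrapper
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper


wrapped = logged(child)

# Three distinct objects, one shared name ...
assert impostor.__name__ == wrapped.__name__ == child.__name__ == 'child'
# ... but very different behaviour, so a transform keyed only on the name
# could silently substitute the wrong function.
assert child(3) == 9 and wrapped(3) == 9 and impostor(3) == 42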
3629e58c47941965406372cb2d3b52a3fdbadfc2
|
ckanext/tayside/logic/action/get.py
|
ckanext/tayside/logic/action/get.py
|
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
|
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
|
Handle logic for extras for dataset
|
Handle logic for extras for dataset
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside
|
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
+ if extras:
- for extra in extras:
+ for extra in extras:
- if extra.get('key') == 'theme':
+ if extra.get('key') == 'theme':
- extra['value'] = themes
+ extra['value'] = themes
- return result
+ return result
- extras.append({'key': 'theme', 'value': themes})
+ extras.append({'key': 'theme', 'value': themes})
+ # extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
+ else:
+ result.update({'extras': []})
+ extras = result.get('extras')
+ # extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
|
Handle logic for extras for dataset
|
## Code Before:
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
return result
## Instruction:
Handle logic for extras for dataset
## Code After:
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
if extras:
for extra in extras:
if extra.get('key') == 'theme':
extra['value'] = themes
return result
extras.append({'key': 'theme', 'value': themes})
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
else:
result.update({'extras': []})
extras = result.get('extras')
# extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
|
from ckan.logic.action import get as get_core
from ckan.plugins import toolkit
@toolkit.side_effect_free
def package_show(context, data_dict):
''' This action is overriden so that the extra field "theme" is added.
This is needed because when a dataset is exposed to DCAT it needs this
field.
Themes are coming from groups where a dataset is added to. The field
"theme" exists in group's schema.'''
result = get_core.package_show(context, data_dict)
dataset_id = result.get('id')
model = context.get('model')
package = model.Package.get(dataset_id)
groups = package.get_groups(group_type='group')
themes = []
for group in groups:
theme = group.extras.get('theme')
if theme:
themes.append(theme)
result = result.copy()
extras = result.get('extras')
+ if extras:
- for extra in extras:
+ for extra in extras:
? ++++
- if extra.get('key') == 'theme':
+ if extra.get('key') == 'theme':
? ++++
- extra['value'] = themes
+ extra['value'] = themes
? ++++
- return result
+ return result
? ++++
- extras.append({'key': 'theme', 'value': themes})
+ extras.append({'key': 'theme', 'value': themes})
? ++++
+ # extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
+ else:
+ result.update({'extras': []})
+ extras = result.get('extras')
+ # extras.append({'key': 'dcat_publisher_name', 'value': 'testirame'})
return result
|
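The branch added above exists because package_show results do not always carry an 'extras' list, so iterating or appending blindly can fail. A compact sketch of the same defensive idea using dict.setdefault — note it folds the missing-key and empty-list cases together, which is slightly more permissive than the change above:

def set_extra(dataset, key, value):
    """Set or overwrite one entry in a CKAN-style 'extras' list of dicts."""
    extras = dataset.setdefault('extras', [])   # covers datasets with no extras at all
    for extra in extras:
        if extra.get('key') == key:
            extra['value'] = value
            return dataset
    extras.append({'key': key, 'value': value})
    return dataset


print(set_extra({}, 'theme', ['environment']))
print(set_extra({'extras': [{'key': 'theme', 'value': []}]}, 'theme', ['health']))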
6cb9008ee2ed49d9630735378bd84727aef3caef
|
dipy/core/tests/test_qball.py
|
dipy/core/tests/test_qball.py
|
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
|
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
TEST - some real_sph_harm tests
|
TEST - some real_sph_harm tests
|
Python
|
bsd-3-clause
|
villalonreina/dipy,samuelstjean/dipy,jyeatman/dipy,sinkpoint/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,villalonreina/dipy,nilgoyyou/dipy,beni55/dipy,demianw/dipy,FrancoisRheaultUS/dipy,rfdougherty/dipy,JohnGriffiths/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,JohnGriffiths/dipy,samuelstjean/dipy,samuelstjean/dipy,StongeEtienne/dipy,oesteban/dipy,maurozucchelli/dipy,matthieudumont/dipy,matthieudumont/dipy,nilgoyyou/dipy,oesteban/dipy,rfdougherty/dipy,beni55/dipy,FrancoisRheaultUS/dipy,jyeatman/dipy,maurozucchelli/dipy,sinkpoint/dipy,StongeEtienne/dipy,demianw/dipy
|
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
+ @parametric
+ def test_sph_harm_ind_list():
+ m_list, n_list = qball.sph_harm_ind_list(8)
+ yield assert_equal(m_list.shape, n_list.shape)
+ yield assert_equal(m_list.ndim, 2)
+ yield assert_equal(m_list.shape, (45,1))
+ yield assert_true(np.all(np.abs(m_list) <= n_list))
+ yield assert_array_equal(n_list % 2, 0)
+ yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
- yield assert_false(True)
|
TEST - some real_sph_harm tests
|
## Code Before:
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
## Instruction:
TEST - some real_sph_harm tests
## Code After:
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
+ @parametric
+ def test_sph_harm_ind_list():
+ m_list, n_list = qball.sph_harm_ind_list(8)
+ yield assert_equal(m_list.shape, n_list.shape)
+ yield assert_equal(m_list.ndim, 2)
+ yield assert_equal(m_list.shape, (45,1))
+ yield assert_true(np.all(np.abs(m_list) <= n_list))
+ yield assert_array_equal(n_list % 2, 0)
+ yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
- yield assert_false(True)
|
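The new assertions above encode the size of the even-degree spherical-harmonic index set: for a maximum order of 8 the degrees 0, 2, 4, 6, 8 contribute 1 + 5 + 9 + 13 + 17 = 45 (m, n) pairs, each with |m| <= n. The following sketch enumerates the same index set independently of the dipy implementation, just to make the expected shapes concrete:

import numpy as np


def even_sph_harm_indices(max_order):
    """(m_list, n_list) column vectors for every even degree up to max_order."""
    m_vals, n_vals = [], []
    for n in range(0, max_order + 1, 2):   # even degrees only
        for m in range(-n, n + 1):         # 2n + 1 orders per degree
            m_vals.append(m)
            n_vals.append(n)
    return (np.array(m_vals).reshape(-1, 1),
            np.array(n_vals).reshape(-1, 1))


m_list, n_list = even_sph_harm_indices(8)
assert m_list.shape == n_list.shape == (45, 1)
assert np.all(np.abs(m_list) <= n_list)
assert np.all(n_list % 2 == 0)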
6e3ddfc47487a8841a79d6265c96ba63005fccec
|
bnw_handlers/command_onoff.py
|
bnw_handlers/command_onoff.py
|
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
Fix on/off if there is no 'off' field.
|
Fix on/off if there is no 'off' field.
|
Python
|
bsd-2-clause
|
un-def/bnw,stiletto/bnw,un-def/bnw,stiletto/bnw,ojab/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw,stiletto/bnw,un-def/bnw,ojab/bnw
|
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
- if request.user['off']:
+ if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
- if request.user['off']:
+ if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
Fix on/off if there is no 'off' field.
|
## Code Before:
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user['off']:
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
## Instruction:
Fix on/off if there is no 'off' field.
## Code After:
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
if request.user.get('off',False):
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
from base import *
import random
import bnw_core.bnw_objects as objs
@require_auth
@defer.inlineCallbacks
def cmd_on(request):
""" Включение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':False}},safe=True)
- if request.user['off']:
? ^ ^
+ if request.user.get('off',False):
? ^^^^^ ^^^^^^^
defer.returnValue(
dict(ok=True,desc='Welcome back!')
)
else:
defer.returnValue(
dict(ok=True,desc='Welcoooome baaaack, I said.')
)
@require_auth
@defer.inlineCallbacks
def cmd_off(request):
""" Выключение доставки сообщений """
_ = yield objs.User.mupdate({'name':request.user['name']},{'$set':{'off':True}},safe=True)
- if request.user['off']:
? ^ ^
+ if request.user.get('off',False):
? ^^^^^ ^^^^^^^
defer.returnValue(
dict(ok=True,desc='See you later.')
)
else:
defer.returnValue(
dict(ok=True,desc='C u l8r!')
)
|
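The on/off fix above is the usual guard for documents written before a field existed: dict.get with a default degrades gracefully where plain indexing raises KeyError. The pattern in isolation:

user = {'name': 'alice'}              # legacy record created before the 'off' flag

# user['off'] would raise KeyError here; .get falls back to the default instead.
if user.get('off', False):
    print('message delivery is currently off')
else:
    print('message delivery is on')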
dd50858ee22c27076919614d1994e3ce9c8e2399
|
soundem/handlers.py
|
soundem/handlers.py
|
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': 'Bad Request',
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
|
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': e.name,
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
|
Fix json error handler name
|
Fix json error handler name
|
Python
|
mit
|
building4theweb/soundem-api
|
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
- 'error': 'Bad Request',
+ 'error': e.name,
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
|
Fix json error handler name
|
## Code Before:
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': 'Bad Request',
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
## Instruction:
Fix json error handler name
## Code After:
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
'error': e.name,
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
|
from flask import jsonify
from soundem import app
def json_error_handler(e):
return jsonify({
'status_code': e.code,
- 'error': 'Bad Request',
+ 'error': e.name,
'detail': e.description
}), e.code
@app.errorhandler(400)
def bad_request_handler(e):
return json_error_handler(e)
@app.errorhandler(401)
def unauthorized_handler(e):
return json_error_handler(e)
@app.errorhandler(404)
def not_found_handler(e):
return json_error_handler(e)
@app.errorhandler(405)
def method_not_allowed_handler(e):
return json_error_handler(e)
|
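A short illustration of why the record above works, standalone and not part of the soundem codebase: Flask routes errors through werkzeug's HTTPException subclasses, whose code, name and description attributes let one shared handler describe every status it is registered for:
from werkzeug.exceptions import BadRequest, NotFound, MethodNotAllowed
for exc in (BadRequest(), NotFound(), MethodNotAllowed()):
    # Prints e.g. "400 Bad Request <default description>", which is exactly
    # what the shared json_error_handler above relies on.
    print(exc.code, exc.name, exc.description)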
3aa2f858f93ed3945bf1960d5c5d1d90df34422c
|
MoodJournal/entries/serializers.py
|
MoodJournal/entries/serializers.py
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
Revert "unique for date validator"
|
Revert "unique for date validator"
This reverts commit 7d2eee38eebf62787b77cdd41e7677cfdad6d47b.
|
Python
|
mit
|
swpease/MoodJournal,swpease/MoodJournal,swpease/MoodJournal
|
from rest_framework import serializers
- from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
+ from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
- validators = [
- UniqueForDateValidator(
- queryset=EntryInstance.objects.all(),
- field='category',
- date_field='date',
- message='You already have an entry for this category on this date!'
- )
- ]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
Revert "unique for date validator"
|
## Code Before:
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
validators = [
UniqueForDateValidator(
queryset=EntryInstance.objects.all(),
field='category',
date_field='date',
message='You already have an entry for this category on this date!'
)
]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
## Instruction:
Revert "unique for date validator"
## Code After:
from rest_framework import serializers
from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
from rest_framework import serializers
- from rest_framework.validators import UniqueTogetherValidator, UniqueForDateValidator
? ------------------------
+ from rest_framework.validators import UniqueTogetherValidator
from .models import UserDefinedCategory
from .models import EntryInstance
class UserDefinedCategorySerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='category-detail')
user = serializers.HiddenField(default=serializers.CurrentUserDefault())
rank = serializers.IntegerField(max_value=2147483647, min_value=0, required=False)
class Meta:
model = UserDefinedCategory
validators = [
UniqueTogetherValidator(
queryset=UserDefinedCategory.objects.all(),
fields=('user', 'category'),
message='There is already a category with this name.'
)
]
fields = ('url', 'category', 'rank', 'pk', 'user')
class EntryInstanceSerializer(serializers.HyperlinkedModelSerializer):
url = serializers.HyperlinkedIdentityField(view_name='entry-detail')
# I was getting weird behavior using other serializer fields, so here we are:
category = serializers.PrimaryKeyRelatedField(queryset=UserDefinedCategory.objects.all())
class Meta:
model = EntryInstance
- validators = [
- UniqueForDateValidator(
- queryset=EntryInstance.objects.all(),
- field='category',
- date_field='date',
- message='You already have an entry for this category on this date!'
- )
- ]
fields = ('url', 'category', 'date', 'entry', 'quality_rating')
|
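For reference only, and not implied by this revert: Django also offers a model-level counterpart to the removed validator, the unique_for_date field option, sketched here with hypothetical field definitions. It is enforced during model validation (full_clean and ModelForm validation) rather than at the database level:
from django.db import models
class EntryInstance(models.Model):
    # Hypothetical layout: "one entry per category per date" expressed on the
    # field itself instead of via a serializer validator.
    category = models.ForeignKey('UserDefinedCategory', on_delete=models.CASCADE,
                                 unique_for_date='date')
    date = models.DateField()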
69ff671582bb343bd2ac9515964a3913e29f3d72
|
oabutton/wsgi.py
|
oabutton/wsgi.py
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
# Enable Django secure mode (see http://security.stackexchange.com/a/8970)
os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Enable Django secure mode in WSGI module
|
Enable Django secure mode in WSGI module
|
Python
|
mit
|
OAButton/OAButton_old,OAButton/OAButton_old,OAButton/OAButton_old
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
+
+ # Enable Django secure mode (see http://security.stackexchange.com/a/8970)
+ os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Enable Django secure mode in WSGI module
|
## Code Before:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
## Instruction:
Enable Django secure mode in WSGI module
## Code After:
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
# Enable Django secure mode (see http://security.stackexchange.com/a/8970)
os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "oabutton.settings")
+
+ # Enable Django secure mode (see http://security.stackexchange.com/a/8970)
+ os.environ["HTTPS"] = "on"
from django.core.wsgi import get_wsgi_application
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from dj_static import Cling
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
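Hedged context for the HTTPS environment variable above: how Django reads it has varied across versions, so the snippet below is not a description of what this repository did. On current Django releases the documented way to mark requests as secure behind a TLS-terminating proxy is the SECURE_PROXY_SSL_HEADER setting, assuming the proxy always sets X-Forwarded-Proto and strips any client-supplied copy of that header:
# settings.py sketch
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True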
9bc9ec9468459ab49530e6463255cca38aba721c
|
findaconf/tests/test_site_routes.py
|
findaconf/tests/test_site_routes.py
|
from unittest import TestCase
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
|
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if is there a link to login in the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
|
Create tests for login page
|
Create tests for login page
|
Python
|
mit
|
cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf
|
- from unittest import TestCase
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
+ from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
-
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
-
+
+ def test_login(self):
+
+ # test if login page exists
+ resp = self.app.get('/login')
+ assert resp.status_code == 200
+ assert resp.mimetype == 'text/html'
+
+ # test if is there a link to login in the home page
+ resp = self.app.get('/')
+ assert 'href="/login"' in resp.data
|
Create tests for login page
|
## Code Before:
from unittest import TestCase
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
## Instruction:
Create tests for login page
## Code After:
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if is there a link to login in the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
|
- from unittest import TestCase
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
+ from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
-
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
+
+ def test_login(self):
+
+ # test if login page exists
+ resp = self.app.get('/login')
+ assert resp.status_code == 200
+ assert resp.mimetype == 'text/html'
+
+ # test if is there a link to login in the home page
+ resp = self.app.get('/')
+ assert 'href="/login"' in resp.data
|
1f25d3a8d73fe776a2182ee68c027105fd15ab04
|
tiamat/decorators.py
|
tiamat/decorators.py
|
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
if not isinstance(output, dict):
return output
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
if not isinstance(output, dict):
return output
return HttpResponse(
"%s(%s)" % (request.GET.get(functionCallKey),
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
|
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
return HttpResponse(
"%s(%s)" % (request.GET.get(functionCallKey, functionCallKey),
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
|
Fix problem in as_json and as_jsonp
|
Fix problem in as_json and as_jsonp
|
Python
|
bsd-2-clause
|
rvause/django-tiamat
|
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
-
- if not isinstance(output, dict):
- return output
-
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
-
- if not isinstance(output, dict):
- return output
-
return HttpResponse(
- "%s(%s)" % (request.GET.get(functionCallKey),
+ "%s(%s)" % (request.GET.get(functionCallKey, functionCallKey),
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
|
Fix problem in as_json and as_jsonp
|
## Code Before:
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
if not isinstance(output, dict):
return output
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
if not isinstance(output, dict):
return output
return HttpResponse(
"%s(%s)" % (request.GET.get(functionCallKey),
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
## Instruction:
Fix problem in as_json and as_jsonp
## Code After:
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
return HttpResponse(
"%s(%s)" % (request.GET.get(functionCallKey, functionCallKey),
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
|
import json
from functools import wraps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
def as_json(func):
def decorator(request, *ar, **kw):
output = func(request, *ar, **kw)
-
- if not isinstance(output, dict):
- return output
-
return HttpResponse(json.dumps(output), 'application/json')
return decorator
def as_jsonp(functionCallKey='callback'):
def decorator(func):
def wrapper(request, *ar, **kw):
output = func(request, *ar, **kw)
-
- if not isinstance(output, dict):
- return output
-
return HttpResponse(
- "%s(%s)" % (request.GET.get(functionCallKey),
+ "%s(%s)" % (request.GET.get(functionCallKey, functionCallKey),
? +++++++++++++++++
json.dumps(output)),
'application/json'
)
return wrapper
return decorator
def as_html(template_path):
"""
Decorator with the same functionality as render_to_response has, but uses
decorator syntax.
"""
def decorator(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
output = func(request, *args, **kwargs)
if not isinstance(output, dict):
return output
return render_to_response(
template_path,
output,
context_instance=RequestContext(request)
)
return wrapper
return decorator
|
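A hypothetical usage sketch for the decorators fixed in this record, assuming they are importable as tiamat.decorators: views simply return a dict and the decorators take care of serialisation and the HTTP response:
from tiamat.decorators import as_json, as_jsonp
@as_json
def status(request):
    return {'ok': True}
@as_jsonp('callback')
def status_jsonp(request):
    # Wrapped as callback({...}) using ?callback=<fn> from the query string;
    # with the fix above, a missing parameter falls back to the literal key.
    return {'ok': True}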
f8292dced6aef64950280a33e9980a7998f07104
|
tests/services/shop/base.py
|
tests/services/shop/base.py
|
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
self.db.session.add(article)
self.db.session.commit()
return article
|
from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
return article_service.create_article(
shop_id,
article.item_number,
article.description,
article.price,
article.tax_rate,
article.quantity,
)
|
Create test articles via service
|
Create test articles via service
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
+
+ from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
- self.db.session.add(article)
- self.db.session.commit()
+ return article_service.create_article(
+ shop_id,
+ article.item_number,
+ article.description,
+ article.price,
+ article.tax_rate,
+ article.quantity,
+ )
- return article
-
|
Create test articles via service
|
## Code Before:
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
self.db.session.add(article)
self.db.session.commit()
return article
## Instruction:
Create test articles via service
## Code After:
from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
return article_service.create_article(
shop_id,
article.item_number,
article.description,
article.price,
article.tax_rate,
article.quantity,
)
|
+
+ from byceps.services.shop.article import service as article_service
from testfixtures.shop_article import create_article
from testfixtures.shop_shop import create_shop
from tests.base import AbstractAppTestCase
from tests.helpers import DEFAULT_EMAIL_CONFIG_ID
class ShopTestBase(AbstractAppTestCase):
# -------------------------------------------------------------------- #
# helpers
def create_shop(
self, shop_id='shop-1', email_config_id=DEFAULT_EMAIL_CONFIG_ID
):
shop = create_shop(shop_id, email_config_id)
self.db.session.add(shop)
self.db.session.commit()
return shop
def create_article(self, shop_id, **kwargs):
article = create_article(shop_id, **kwargs)
- self.db.session.add(article)
- self.db.session.commit()
-
- return article
+ return article_service.create_article(
+ shop_id,
+ article.item_number,
+ article.description,
+ article.price,
+ article.tax_rate,
+ article.quantity,
+ )
|
6df7ee955c7dfaee9a597b331dbc4c448fe3738a
|
fpr/migrations/0017_ocr_unique_names.py
|
fpr/migrations/0017_ocr_unique_names.py
|
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
IDCommand = apps.get_model('fpr', 'IDCommand')
ocr_command = IDCommand.objects.get(
uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
|
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
FPCommand = apps.get_model('fpr', 'FPCommand')
ocr_command = FPCommand.objects.get(
uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
|
Fix OCR command UUID typo
|
Fix OCR command UUID typo
|
Python
|
agpl-3.0
|
artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin,artefactual/archivematica-fpr-admin
|
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
- IDCommand = apps.get_model('fpr', 'IDCommand')
+ FPCommand = apps.get_model('fpr', 'FPCommand')
- ocr_command = IDCommand.objects.get(
+ ocr_command = FPCommand.objects.get(
- uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
+ uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
|
Fix OCR command UUID typo
|
## Code Before:
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
IDCommand = apps.get_model('fpr', 'IDCommand')
ocr_command = IDCommand.objects.get(
uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
## Instruction:
Fix OCR command UUID typo
## Code After:
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
FPCommand = apps.get_model('fpr', 'FPCommand')
ocr_command = FPCommand.objects.get(
uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
|
from __future__ import unicode_literals
from django.db import migrations
def data_migration(apps, schema_editor):
"""Migration that causes each OCR text file to include the UUID of its
source file in its filename. This prevents OCR text files from overwriting
one another when there are two identically named source files in a
transfer. See
https://github.com/artefactual/archivematica-fpr-admin/issues/66
"""
- IDCommand = apps.get_model('fpr', 'IDCommand')
? ^^ ^^
+ FPCommand = apps.get_model('fpr', 'FPCommand')
? ^^ ^^
- ocr_command = IDCommand.objects.get(
? ^^
+ ocr_command = FPCommand.objects.get(
? ^^
- uuid='5d501dbf-76bb-4569-a9db-9e367800995e')
+ uuid='4ea06c2b-ee42-4f80-ad10-4e044ba0676a')
ocr_command.command = (
'ocrfiles="%SIPObjectsDirectory%metadata/OCRfiles"\n'
'test -d "$ocrfiles" || mkdir -p "$ocrfiles"\n\n'
'tesseract %fileFullName% "$ocrfiles/%fileName%-%fileUUID%"')
ocr_command.output_location = (
'%SIPObjectsDirectory%metadata/OCRfiles/%fileName%-%fileUUID%.txt')
ocr_command.save()
class Migration(migrations.Migration):
dependencies = [
('fpr', '0016_update_idtools'),
]
operations = [
migrations.RunPython(data_migration),
]
|
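A general sketch of how the migration above could also declare a reverse step, reusing its data_migration function: on Django 1.8+ RunPython.noop is the built-in "do nothing" reverse, which keeps a one-way data fix like this UUID correction formally reversible:
from django.db import migrations
class Migration(migrations.Migration):
    dependencies = [('fpr', '0016_update_idtools')]
    operations = [
        migrations.RunPython(data_migration, migrations.RunPython.noop),
    ]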
6bd088acd0ec0cfa5298051e286ce76e42430067
|
shuup/front/themes/views/_product_preview.py
|
shuup/front/themes/views/_product_preview.py
|
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path` in
# `classic_gray/shuup/front/product/_detail_order_section.jinja`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
Remove reference to nonexistent file
|
Front: Remove reference to nonexistent file
|
Python
|
agpl-3.0
|
shoopio/shoop,shoopio/shoop,shawnadelic/shuup,suutari-ai/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,suutari/shoop,suutari/shoop,shoopio/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,shawnadelic/shuup,suutari/shoop,suutari-ai/shoop,hrayr-artunyan/shuup
|
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
- # Add `return_url` to context to avoid usage of `request.path` in
+ # Add `return_url` to context to avoid usage of `request.path`
- # `classic_gray/shuup/front/product/_detail_order_section.jinja`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
Remove reference to nonexistent file
|
## Code Before:
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path` in
# `classic_gray/shuup/front/product/_detail_order_section.jinja`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
## Instruction:
Remove reference to nonexistent file
## Code After:
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
# Add `return_url` to context to avoid usage of `request.path`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
from shuup.front.views.product import ProductDetailView
class ProductPreviewView(ProductDetailView):
template_name = "shuup/front/product/product_preview.jinja"
def get_context_data(self, **kwargs):
# By default the template rendering the basket add form
# uses the `request.path` as its' `next` value.
# This is fine if you are on product page but here in
# preview, we cannot redirect back to `/xtheme/product_preview`.
context = super(ProductPreviewView, self).get_context_data(**kwargs)
- # Add `return_url` to context to avoid usage of `request.path` in
? ---
+ # Add `return_url` to context to avoid usage of `request.path`
- # `classic_gray/shuup/front/product/_detail_order_section.jinja`
context["return_url"] = "/xtheme/products"
return context
def product_preview(request):
return ProductPreviewView.as_view()(request, pk=request.GET["id"])
|
39a1212508c27a5c21f8b027fef3fb409a28657f
|
app/commands.py
|
app/commands.py
|
from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
|
import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
|
Switch existing command to standard approach
|
Switch existing command to standard approach
This is the suggested approach in the documentation [1] and using
it makes it clearer what's going on and to add other commands with
arguments, which we'll do in the next commit.
[1]: https://flask.palletsprojects.com/en/2.0.x/cli/#custom-commands
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
+ import click
from flask import current_app
+ from flask.cli import with_appcontext
+ @click.command('list-routes')
+ @with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
- application.cli.command('list-routes')(list_routes)
+ application.cli.add_command(list_routes)
|
Switch existing command to standard approach
|
## Code Before:
from flask import current_app
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.command('list-routes')(list_routes)
## Instruction:
Switch existing command to standard approach
## Code After:
import click
from flask import current_app
from flask.cli import with_appcontext
@click.command('list-routes')
@with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
application.cli.add_command(list_routes)
|
+ import click
from flask import current_app
+ from flask.cli import with_appcontext
+ @click.command('list-routes')
+ @with_appcontext
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(current_app.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule)) # noqa
def setup_commands(application):
- application.cli.command('list-routes')(list_routes)
? ---------------
+ application.cli.add_command(list_routes)
? ++++
|
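A hypothetical follow-on command, not part of the repository, showing why the standard click approach in this record scales to commands that take arguments and options:
import click
from flask.cli import with_appcontext
@click.command('purge-cache')
@click.argument('prefix')
@click.option('--dry-run', is_flag=True, help='Report keys without deleting them.')
@with_appcontext
def purge_cache(prefix, dry_run):
    """Delete cached keys that start with PREFIX."""
    click.echo('{} keys starting with {!r}'.format(
        'Would delete' if dry_run else 'Deleting', prefix))
# Registered the same way as list_routes above:
# application.cli.add_command(purge_cache)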
97b2e90f4f9a4f3c08f4556856aec1d31b44749a
|
flocker/control/_clusterstate.py
|
flocker/control/_clusterstate.py
|
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
(Follow up issue will deal with semantics of expiring data, which
should happen so stale information isn't stored. This needs some extra
work for the agent resending state even when it doesn't change, etc..)
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
|
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with
semantics of expiring data, which should happen so stale information
isn't treated as correct.
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
|
Address review comment: Link to issue.
|
Address review comment: Link to issue.
|
Python
|
apache-2.0
|
achanda/flocker,runcom/flocker,Azulinho/flocker,mbrukman/flocker,jml/flocker,moypray/flocker,AndyHuu/flocker,agonzalezro/flocker,Azulinho/flocker,moypray/flocker,w4ngyi/flocker,moypray/flocker,jml/flocker,LaynePeng/flocker,hackday-profilers/flocker,adamtheturtle/flocker,LaynePeng/flocker,1d4Nf6/flocker,w4ngyi/flocker,adamtheturtle/flocker,AndyHuu/flocker,runcom/flocker,wallnerryan/flocker-profiles,w4ngyi/flocker,mbrukman/flocker,lukemarsden/flocker,jml/flocker,AndyHuu/flocker,achanda/flocker,agonzalezro/flocker,hackday-profilers/flocker,Azulinho/flocker,hackday-profilers/flocker,lukemarsden/flocker,achanda/flocker,agonzalezro/flocker,lukemarsden/flocker,1d4Nf6/flocker,LaynePeng/flocker,runcom/flocker,adamtheturtle/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,wallnerryan/flocker-profiles,mbrukman/flocker
|
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
- (Follow up issue will deal with semantics of expiring data, which
- should happen so stale information isn't stored. This needs some extra
- work for the agent resending state even when it doesn't change, etc..)
+ https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with
+ semantics of expiring data, which should happen so stale information
+ isn't treated as correct.
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
|
Address review comment: Link to issue.
|
## Code Before:
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
(Follow up issue will deal with semantics of expiring data, which
should happen so stale information isn't stored. This needs some extra
work for the agent resending state even when it doesn't change, etc..)
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
## Instruction:
Address review comment: Link to issue.
## Code After:
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with
semantics of expiring data, which should happen so stale information
isn't treated as correct.
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
|
from twisted.application.service import Service
from ._model import Deployment, Node
class ClusterStateService(Service):
"""
Store known current cluster state, and combine partial updates with
the existing known state.
- (Follow up issue will deal with semantics of expiring data, which
- should happen so stale information isn't stored. This needs some extra
- work for the agent resending state even when it doesn't change, etc..)
+ https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with
+ semantics of expiring data, which should happen so stale information
+ isn't treated as correct.
"""
def __init__(self):
self._nodes = {}
def update_node_state(self, hostname, node_state):
"""
Update the state of a given node.
:param unicode hostname: The node's identifier.
:param NodeState node_state: The state of the node.
"""
self._nodes[hostname] = node_state
def as_deployment(self):
"""
Return cluster state as a Deployment object.
"""
return Deployment(nodes=frozenset([
Node(hostname=hostname,
applications=frozenset(
node_state.running + node_state.not_running))
for hostname, node_state in self._nodes.items()]))
|
014925aa73e85fe3cb0d939a3d5d9c30424e32b4
|
func.py
|
func.py
|
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's symbol
else:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
print(letter_txt)
letter_file.close()
|
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's symbol or number - NOT SUPPORTED in Ver. 1.0
else:
print("Sorry, Numbers and Symbols are NOT supported yet :)\n"
"I'll Add them in Ver. 2.0")
return
print(letter_txt)
letter_file.close()
|
Add Numbers and Symbols Exception
|
Add Numbers and Symbols Exception
|
Python
|
mit
|
MohamadKh75/Arthon
|
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
- # if it's symbol
+ # if it's symbol or number - NOT SUPPORTED in Ver. 1.0
else:
- letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
- letter_txt = letter_file.read()
+ print("Sorry, Numbers and Symbols are NOT supported yet :)\n"
+ "I'll Add them in Ver. 2.0")
+ return
print(letter_txt)
letter_file.close()
|
Add Numbers and Symbols Exception
|
## Code Before:
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's symbol
else:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
print(letter_txt)
letter_file.close()
## Instruction:
Add Numbers and Symbols Exception
## Code After:
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's symbol or number - NOT SUPPORTED in Ver. 1.0
else:
print("Sorry, Numbers and Symbols are NOT supported yet :)\n"
"I'll Add them in Ver. 2.0")
return
print(letter_txt)
letter_file.close()
|
from pathlib import Path
# Set the Alphabet folder path
folder_path = Path("Alphabet").resolve()
# Read all Capital Letters - AA is Capital A
def letter_reader(letter):
# if it's Capital - AA is Capital A
if 65 <= ord(letter) <= 90:
letter_file = open(str(folder_path) + str("\\") + str(letter) + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
# if it's small - a is small a
elif 97 <= ord(letter) <= 122:
letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
letter_txt = letter_file.read()
- # if it's symbol
+ # if it's symbol or number - NOT SUPPORTED in Ver. 1.0
else:
- letter_file = open(str(folder_path) + str("\\") + str(letter) + ".txt", 'r')
- letter_txt = letter_file.read()
+ print("Sorry, Numbers and Symbols are NOT supported yet :)\n"
+ "I'll Add them in Ver. 2.0")
+ return
print(letter_txt)
letter_file.close()
|
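A brief usage sketch for the letter_reader change above. It assumes the module has been imported and that the Alphabet folder holds AA.txt-style files as described in the comments; the word being rendered is invented for illustration.

# Hypothetical driver: render each character of a word in turn. Capital and
# small letters print their ASCII art; the digit '5' falls into the new
# else-branch and only prints the "not supported" notice before returning.
for character in "Hi5":
    letter_reader(character)
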
fdd1604ae64d72dc2391abe137adba07da830bcd
|
imagersite/imager_profile/models.py
|
imagersite/imager_profile/models.py
|
"""Models."""
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
def is_active(self):
"""Return if the user can log in."""
return self.user.is_active
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
|
"""Models."""
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
# Need to have models.Manager since we overwrote default with ActiveUser
# Without it, we would have lost reference to 'objects'
objects = models.Manager()
active = ActiveUserManager()
@property
def is_active(self):
"""Return all instances of active ImagerProfile."""
return self.user.is_active
# We control the profile, don't have code for user
# If profile is deleted, user is deleted. We want the opposite.
# How do we do that?
# Idea of Signals (pyramid also has)
# Signals hook into the listener pattern (like event listeners)
# Imager profile exists, and gets removed (handelers.py)
# first arg(sender(class that sent signal), **kwargs)
# Must ensure errors aren't raised. Log problem, do nothing.
# If errors are raised, it will prevent other things from happening
# Must put signal code into a place where Django can execute it.
# in apps.py def ready(self): from imager_profile import handlers (will register handlers)
# In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
# now Django knows about handlers
|
Add ability to access all 'objects' and only 'active' users
|
Add ability to access all 'objects' and only 'active' users
|
Python
|
mit
|
DZwell/django-imager
|
"""Models."""
+ from __future__ import unicode_literals
+
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
+
+
+ class ActiveUserManager(models.Manager):
+ """Manager to grab active users."""
+
+ def get_query_set(self):
+ """Return only active users."""
+ return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
+ # Need to have models.Manager since we overwrote default with ActiveUser
+ # Without it, we would have lost reference to 'objects'
+ objects = models.Manager()
+ active = ActiveUserManager()
+
+ @property
def is_active(self):
- """Return if the user can log in."""
+ """Return all instances of active ImagerProfile."""
return self.user.is_active
-
-
- class ActiveUserManager(models.Manager):
- """Manager to grab active users."""
-
- def get_query_set(self):
- """Return only active users."""
- return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
+
+ # We control the profile, don't have code for user
+ # If profile is deleted, user is deleted. We want the opposite.
+ # How do we do that?
+ # Idea of Signals (pyramid also has)
+ # Signals hook into the listener pattern (like event listeners)
+ # Imager profile exists, and gets removed (handelers.py)
+ # first arg(sender(class that sent signal), **kwargs)
+ # Must ensure errors aren't raised. Log problem, do nothing.
+ # If errors are raised, it will prevent other things from happening
+ # Must put signal code into a place where Django can execute it.
+ # in apps.py def ready(self): from imager_profile import handlers (will register handlers)
+ # In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
+ # now Django knows about handlers
+
|
Add ability to access all 'objects' and only 'active' users
|
## Code Before:
"""Models."""
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
def is_active(self):
"""Return if the user can log in."""
return self.user.is_active
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
## Instruction:
Add ability to access all 'objects' and only 'active' users
## Code After:
"""Models."""
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
# Need to have models.Manager since we overwrote default with ActiveUser
# Without it, we would have lost reference to 'objects'
objects = models.Manager()
active = ActiveUserManager()
@property
def is_active(self):
"""Return all instances of active ImagerProfile."""
return self.user.is_active
# We control the profile, don't have code for user
# If profile is deleted, user is deleted. We want the opposite.
# How do we do that?
# Idea of Signals (pyramid also has)
# Signals hook into the listener pattern (like event listeners)
# Imager profile exists, and gets removed (handelers.py)
# first arg(sender(class that sent signal), **kwargs)
# Must ensure errors aren't raised. Log problem, do nothing.
# If errors are raised, it will prevent other things from happening
# Must put signal code into a place where Django can execute it.
# in apps.py def ready(self): from imager_profile import handlers (will register handlers)
# In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
# now Django knows about handlers
|
"""Models."""
+ from __future__ import unicode_literals
+
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
+
+
+ class ActiveUserManager(models.Manager):
+ """Manager to grab active users."""
+
+ def get_query_set(self):
+ """Return only active users."""
+ return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
+ # Need to have models.Manager since we overwrote default with ActiveUser
+ # Without it, we would have lost reference to 'objects'
+ objects = models.Manager()
+ active = ActiveUserManager()
+
+ @property
def is_active(self):
- """Return if the user can log in."""
+ """Return all instances of active ImagerProfile."""
return self.user.is_active
-
-
- class ActiveUserManager(models.Manager):
- """Manager to grab active users."""
-
- def get_query_set(self):
- """Return only active users."""
- return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
+
+
+ # We control the profile, don't have code for user
+ # If profile is deleted, user is deleted. We want the opposite.
+ # How do we do that?
+ # Idea of Signals (pyramid also has)
+ # Signals hook into the listener pattern (like event listeners)
+ # Imager profile exists, and gets removed (handelers.py)
+ # first arg(sender(class that sent signal), **kwargs)
+ # Must ensure errors aren't raised. Log problem, do nothing.
+ # If errors are raised, it will prevent other things from happening
+ # Must put signal code into a place where Django can execute it.
+ # in apps.py def ready(self): from imager_profile import handlers (will register handlers)
+ # In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
+ # now Django knows about handlers
|
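A short usage sketch for the two managers introduced above; the queries are illustrative only. Note that, as committed, the filter references a bare `user` name, so in practice the active manager would likely spell the lookup as filter(user__is_active=True) (and newer Django expects get_queryset rather than get_query_set).

# Default manager kept as `objects`, custom manager exposed as `active`.
all_profiles = ImagerProfile.objects.all()
active_profiles = ImagerProfile.active.all()  # only profiles whose user is active
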
8fafef4c2151d17133c5787847d68ab4b58f40c3
|
stagecraft/libs/views/utils.py
|
stagecraft/libs/views/utils.py
|
import json
from django.utils.cache import patch_response_headers
from functools import wraps
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
return json.dumps(what, indent=1)
|
import json
from django.utils.cache import patch_response_headers
from functools import wraps
from uuid import UUID
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, UUID):
return '{}'.format(obj)
if hasattr(obj, 'serialize'):
return obj.serialize()
return json.JSONEncoder.default(self, obj)
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
return json.dumps(what, indent=1, cls=JsonEncoder)
|
Extend JSON serialiser to use serialize() method
|
Extend JSON serialiser to use serialize() method
If an object is a UUID, return a string representation of it.
If the object still can't be serialised, call its serialize() method.
This is useful when nesting Link models inside dashboards, for
example.
|
Python
|
mit
|
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
|
import json
from django.utils.cache import patch_response_headers
from functools import wraps
+ from uuid import UUID
+
+
+ class JsonEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, UUID):
+ return '{}'.format(obj)
+
+ if hasattr(obj, 'serialize'):
+ return obj.serialize()
+
+ return json.JSONEncoder.default(self, obj)
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
- return json.dumps(what, indent=1)
+ return json.dumps(what, indent=1, cls=JsonEncoder)
|
Extend JSON serialiser to use serialize() method
|
## Code Before:
import json
from django.utils.cache import patch_response_headers
from functools import wraps
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
return json.dumps(what, indent=1)
## Instruction:
Extend JSON serialiser to use serialize() method
## Code After:
import json
from django.utils.cache import patch_response_headers
from functools import wraps
from uuid import UUID
class JsonEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, UUID):
return '{}'.format(obj)
if hasattr(obj, 'serialize'):
return obj.serialize()
return json.JSONEncoder.default(self, obj)
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
return json.dumps(what, indent=1, cls=JsonEncoder)
|
import json
from django.utils.cache import patch_response_headers
from functools import wraps
+ from uuid import UUID
+
+
+ class JsonEncoder(json.JSONEncoder):
+ def default(self, obj):
+ if isinstance(obj, UUID):
+ return '{}'.format(obj)
+
+ if hasattr(obj, 'serialize'):
+ return obj.serialize()
+
+ return json.JSONEncoder.default(self, obj)
def long_cache(a_view):
@wraps(a_view)
def _wrapped_view(request, *args, **kwargs):
response = a_view(request, *args, **kwargs)
patch_response_headers(response, 86400 * 365)
return response
return _wrapped_view
def to_json(what):
- return json.dumps(what, indent=1)
+ return json.dumps(what, indent=1, cls=JsonEncoder)
? +++++++++++++++++
|
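A small usage sketch of the encoder added above, assuming JsonEncoder (or the to_json helper) is imported from the patched module; the payload is made up.

import json
from uuid import uuid4

payload = {'id': uuid4(), 'count': 3}
# Plain json.dumps raises TypeError on the UUID; with cls=JsonEncoder the
# default() hook converts it to its string form (and would call .serialize()
# on model-like objects that define one).
print(json.dumps(payload, indent=1, cls=JsonEncoder))
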
de9e8ab1a91e2a0e69971f9c23377f97e717b048
|
app/__init__.py
|
app/__init__.py
|
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
|
import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
Add additional application for gunicorn.
|
Add additional application for gunicorn.
|
Python
|
mpl-2.0
|
mrf345/FQM,mrf345/FQM,mrf345/FQM,mrf345/FQM
|
+ import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
+ application = bundle_app({
+ 'CLI_OR_DEPLOY': True,
+ 'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
+
|
Add additional application for gunicorn.
|
## Code Before:
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
## Instruction:
Add additional application for gunicorn.
## Code After:
import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
application = bundle_app({
'CLI_OR_DEPLOY': True,
'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
+ import os
from app.main import bundle_app # noqa
# NOTE: uncomment out while genrating migration
# app = bundle_app({'MIGRATION': True})
+
+ application = bundle_app({
+ 'CLI_OR_DEPLOY': True,
+ 'GUNICORN': 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')}) # noqa
|
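A hedged sketch of the detection idea the commit above relies on: it assumes, as the commit does, that gunicorn advertises itself through the SERVER_SOFTWARE environment variable, so a substring check is enough to branch on.

import os

# True only when the process was started by gunicorn (per the assumption above);
# a deployment would then typically point the server at app:application.
running_under_gunicorn = 'gunicorn' in os.environ.get('SERVER_SOFTWARE', '')
print(running_under_gunicorn)
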
beb8f12e4a8290d4107cdb91a321a6618a038ef9
|
rose_trellis/util.py
|
rose_trellis/util.py
|
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
def easy_run(func: Callable[Any], *args, **kwargs) -> Any:
el = asyncio.get_event_loop()
return el.run_until_complete(func(*args, **kwargs))
|
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
def easy_run(gen) -> Any:
el = asyncio.get_event_loop()
return el.run_until_complete(gen)
|
Make easy_run better by taking the generator from coroutines
|
Make easy_run better by taking the generator from coroutines
|
Python
|
mit
|
dmwyatt/rose_trellis
|
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
- def easy_run(func: Callable[Any], *args, **kwargs) -> Any:
+ def easy_run(gen) -> Any:
el = asyncio.get_event_loop()
- return el.run_until_complete(func(*args, **kwargs))
+ return el.run_until_complete(gen)
|
Make easy_run better by taking the generator from coroutines
|
## Code Before:
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
def easy_run(func: Callable[Any], *args, **kwargs) -> Any:
el = asyncio.get_event_loop()
return el.run_until_complete(func(*args, **kwargs))
## Instruction:
Make easy_run better by taking the generator from coroutines
## Code After:
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
def easy_run(gen) -> Any:
el = asyncio.get_event_loop()
return el.run_until_complete(gen)
|
from urllib.parse import urljoin
import time
import asyncio
from typing import Any
from typing import Callable
TRELLO_URL_BASE = 'https://api.trello.com/1/'
def join_url(part: str) -> str:
"""
Adds `part` to API base url. Always returns url without trailing slash.
:param part:
:return: url
"""
part = part.strip('/')
newpath = urljoin(TRELLO_URL_BASE, part)
while newpath.endswith('/'):
newpath = newpath[:-1]
return newpath
- def easy_run(func: Callable[Any], *args, **kwargs) -> Any:
+ def easy_run(gen) -> Any:
el = asyncio.get_event_loop()
- return el.run_until_complete(func(*args, **kwargs))
+ return el.run_until_complete(gen)
|
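A minimal before/after usage sketch for the reworked easy_run helper; the coroutine is invented for illustration and easy_run is assumed to be imported from the module above.

import asyncio

async def double(x):
    await asyncio.sleep(0)
    return x * 2

# Old call style: easy_run(double, 21)
# New call style: hand over the coroutine object itself, which also lets
# callers pass composed awaitables such as asyncio.gather(...).
print(easy_run(double(21)))  # 42
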
edcde8ed3562e19b7bde43632965c2902a8e7f25
|
troposphere/sns.py
|
troposphere/sns.py
|
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
|
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere,ikben/troposphere,ikben/troposphere
|
- from . import AWSObject, AWSProperty
+ from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
+ 'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
Add Tags to SNS::Topic per 2019-11-31 changes
|
## Code Before:
from . import AWSObject, AWSProperty
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'TopicName': (basestring, False),
}
## Instruction:
Add Tags to SNS::Topic per 2019-11-31 changes
## Code After:
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
- from . import AWSObject, AWSProperty
+ from . import AWSObject, AWSProperty, Tags
? ++++++
from .compat import policytypes
from .validators import boolean
class Subscription(AWSProperty):
props = {
'Endpoint': (basestring, True),
'Protocol': (basestring, True),
}
class SubscriptionResource(AWSObject):
resource_type = "AWS::SNS::Subscription"
props = {
'DeliveryPolicy': (dict, False),
'Endpoint': (basestring, False),
'FilterPolicy': (dict, False),
'Protocol': (basestring, True),
'RawMessageDelivery': (boolean, False),
'Region': (basestring, False),
'TopicArn': (basestring, True),
}
class TopicPolicy(AWSObject):
resource_type = "AWS::SNS::TopicPolicy"
props = {
'PolicyDocument': (policytypes, True),
'Topics': (list, True),
}
class Topic(AWSObject):
resource_type = "AWS::SNS::Topic"
props = {
'DisplayName': (basestring, False),
'KmsMasterKeyId': (basestring, False),
'Subscription': ([Subscription], False),
+ 'Tags': (Tags, False),
'TopicName': (basestring, False),
}
|
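A short sketch of the new Tags property in use, following troposphere's usual Tags helper; the logical name and tag values are invented.

from troposphere import Tags, Template
from troposphere.sns import Topic

template = Template()
template.add_resource(Topic(
    "AlertsTopic",
    DisplayName="alerts",
    Tags=Tags(Environment="staging", Team="platform"),
))
print(template.to_json())
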
e64b0544b146cb810424e0e243835a34aa977f40
|
boxoffice/__init__.py
|
boxoffice/__init__.py
|
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
mail.init_app(app)
wtforms_json.init()
|
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
mail.init_app(app)
wtforms_json.init()
|
Add assets ractive-transitions-fly and validate
|
Add assets ractive-transitions-fly and validate
|
Python
|
agpl-3.0
|
hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice,hasgeek/boxoffice
|
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
- baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
+ baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
mail.init_app(app)
wtforms_json.init()
|
Add assets ractive-transitions-fly and validate
|
## Code Before:
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
mail.init_app(app)
wtforms_json.init()
## Instruction:
Add assets ractive-transitions-fly and validate
## Code After:
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
mail.init_app(app)
wtforms_json.init()
|
from pytz import timezone
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.lastuser import Lastuser
from flask.ext.lastuser.sqlalchemy import UserManager
from baseframe import baseframe, assets, Version
from ._version import __version__
import coaster.app
import wtforms_json
app = Flask(__name__, instance_relative_config=True)
lastuser = Lastuser()
mail = Mail()
# --- Assets ------------------------------------------------------------------
version = Version(__version__)
assets['boxoffice.js'][version] = 'js/scripts.js'
assets['boxoffice.css'][version] = 'css/order.css'
from boxoffice.models import db, User # noqa
from . import extapi, views # noqa
# Configure the app
def init_for(env):
coaster.app.init_app(app, env)
db.init_app(app)
db.app = app
lastuser.init_app(app)
lastuser.init_usermanager(UserManager(db, User))
app.config['tz'] = timezone(app.config['TIMEZONE'])
- baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive'])
+ baseframe.init_app(app, requires=['boxoffice'], ext_requires=['baseframe-bs3', 'fontawesome>=4.0.0', 'ractive', 'ractive-transitions-fly', 'validate'])
? +++++++++++++++++++++++++++++++++++++++
mail.init_app(app)
wtforms_json.init()
|
04b7e79ce3fed1afac129098badb632ca226fdee
|
dispatch.py
|
dispatch.py
|
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
# wsgi
application = render.application.wsgifunc()
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
Add wsgi handler by default
|
Add wsgi handler by default
|
Python
|
isc
|
Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2,Lagg/optf2,FlaminSarge/optf2
|
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
+ # wsgi
+ application = render.application.wsgifunc()
+
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
Add wsgi handler by default
|
## Code Before:
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
## Instruction:
Add wsgi handler by default
## Code After:
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
# wsgi
application = render.application.wsgifunc()
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
import config
import steam
steam.set_api_key(config.api_key)
from optf2.backend import openid
from optf2.frontend import render
openid.set_session(render.session)
import web
+ # wsgi
+ application = render.application.wsgifunc()
+
if config.enable_fastcgi:
web.wsgi.runwsgi = lambda func, addr=None: web.wsgi.runfcgi(func, addr)
if __name__ == "__main__":
render.application.run()
|
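A brief note on the module-level name exported above: WSGI servers import a callable by "<module>:<name>", so with this change a deployment could reference dispatch:application (hypothetical command, shown for illustration), while running the file directly still uses web.py's built-in server.

# e.g.  gunicorn dispatch:application
assert callable(application)  # the object a WSGI server would invoke
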
b9ccbb2addd8dcaeb100bb5e95768caa2a97c280
|
srttools/core/__init__.py
|
srttools/core/__init__.py
|
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
Set default backend, and minimum statsmodels version
|
Set default backend, and minimum statsmodels version
|
Python
|
bsd-3-clause
|
matteobachetti/srt-single-dish-tools
|
import warnings
+ DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
- # matplotlib.use('TkAgg')
+ # This is necessary. Random backends might respond incorrectly.
+ matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
+ version = [int(i) for i in sm.version.version.split('.')]
+
+ # Minimum version 0.8.0
+ if version < (0, 8, 0):
+ warnings.warn("Please update statsmodels")
+ raise ImportError
+
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
Set default backend, and minimum statsmodels version
|
## Code Before:
import warnings
try:
import matplotlib
# matplotlib.use('TkAgg')
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
## Instruction:
Set default backend, and minimum statsmodels version
## Code After:
import warnings
DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
# This is necessary. Random backends might respond incorrectly.
matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
version = [int(i) for i in sm.version.version.split('.')]
# Minimum version 0.8.0
if version < (0, 8, 0):
warnings.warn("Please update statsmodels")
raise ImportError
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
import warnings
+ DEFAULT_MPL_BACKEND = 'TkAgg'
try:
import matplotlib
- # matplotlib.use('TkAgg')
+ # This is necessary. Random backends might respond incorrectly.
+ matplotlib.use(DEFAULT_MPL_BACKEND)
HAS_MPL = True
except ImportError:
HAS_MPL = False
try:
import statsmodels.api as sm
+ version = [int(i) for i in sm.version.version.split('.')]
+
+ # Minimum version 0.8.0
+ if version < (0, 8, 0):
+ warnings.warn("Please update statsmodels")
+ raise ImportError
+
HAS_STATSM = True
except ImportError:
HAS_STATSM = False
try:
from numba import jit, vectorize
except ImportError:
warnings.warn("Numba not installed. Faking it")
def jit(fun):
return fun
def vectorize(*args, **kwargs):
return jit
|
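A side note on the version guard above, offered as a hedged sketch: on Python 3, comparing the list built by the snippet against a tuple raises TypeError, so an equivalent check would normally cast both sides to tuples. The helper below is illustrative and mirrors the snippet's names rather than the installed package.

def statsmodels_is_recent(version_string, minimum=(0, 8, 0)):
    """Return True when a dotted version string meets the minimum."""
    parsed = tuple(int(part) for part in version_string.split('.'))
    return parsed >= minimum

print(statsmodels_is_recent("0.8.0"))  # True
print(statsmodels_is_recent("0.6.1"))  # False
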
11d3075ba9d1881526ce90d01ae3b3d5728740fa
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
package_data={'dg': ['*.dg', 'core/*', 'bundles/*']}
)
|
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']}
)
|
Install stuff from /addon, too.
|
Install stuff from /addon, too.
|
Python
|
mit
|
pyos/dg
|
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
- package_data={'dg': ['*.dg', 'core/*', 'bundles/*']}
+ package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']}
)
|
Install stuff from /addon, too.
|
## Code Before:
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
package_data={'dg': ['*.dg', 'core/*', 'bundles/*']}
)
## Instruction:
Install stuff from /addon, too.
## Code After:
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']}
)
|
from distutils.core import setup
setup(
name='dg',
version='HEAD',
description='A programming language for the CPython VM',
author='pyos',
author_email='[email protected]',
url='https://github.com/pyos/dg.git',
packages=['dg'],
package_dir={'dg': '.'},
- package_data={'dg': ['*.dg', 'core/*', 'bundles/*']}
+ package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']}
? +++++++++++
)
|
55d10f77f963eb0cdbe29e04fe910f65c4edaec4
|
erpnext/buying/doctype/supplier/supplier_dashboard.py
|
erpnext/buying/doctype/supplier/supplier_dashboard.py
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
Add linked bank accounts to supplier dashboard
|
fix: Add linked bank accounts to supplier dashboard
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
- 'Payment Entry': 'party_name'
+ 'Payment Entry': 'party_name',
+ 'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
+ 'label': _('Bank'),
+ 'items': ['Bank Account']
+ },
+ {
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
-
|
Add linked bank accounts to supplier dashboard
|
## Code Before:
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
## Instruction:
Add linked bank accounts to supplier dashboard
## Code After:
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
'Payment Entry': 'party_name',
'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
'label': _('Bank'),
'items': ['Bank Account']
},
{
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
from __future__ import unicode_literals
from frappe import _
def get_data():
return {
'heatmap': True,
'heatmap_message': _('This is based on transactions against this Supplier. See timeline below for details'),
'fieldname': 'supplier',
'non_standard_fieldnames': {
- 'Payment Entry': 'party_name'
+ 'Payment Entry': 'party_name',
? +
+ 'Bank Account': 'party'
},
'transactions': [
{
'label': _('Procurement'),
'items': ['Request for Quotation', 'Supplier Quotation']
},
{
'label': _('Orders'),
'items': ['Purchase Order', 'Purchase Receipt', 'Purchase Invoice']
},
{
'label': _('Payments'),
'items': ['Payment Entry']
},
{
+ 'label': _('Bank'),
+ 'items': ['Bank Account']
+ },
+ {
'label': _('Pricing'),
'items': ['Pricing Rule']
}
]
}
|
e3f8fa13758ebed06abc1369d8c85474f7346d29
|
api/nodes/urls.py
|
api/nodes/urls.py
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
Add a second delete URL where users will send a request to confirm they want to bulk delete.
|
Add a second delete URL where users will send a request to confirm they want to bulk delete.
|
Python
|
apache-2.0
|
GageGaskins/osf.io,adlius/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,chrisseto/osf.io,abought/osf.io,GageGaskins/osf.io,binoculars/osf.io,RomanZWang/osf.io,danielneis/osf.io,Nesiehr/osf.io,KAsante95/osf.io,baylee-d/osf.io,billyhunt/osf.io,adlius/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,erinspace/osf.io,crcresearch/osf.io,SSJohns/osf.io,erinspace/osf.io,cwisecarver/osf.io,hmoco/osf.io,crcresearch/osf.io,binoculars/osf.io,leb2dg/osf.io,mluke93/osf.io,jnayak1/osf.io,chrisseto/osf.io,caneruguz/osf.io,mluke93/osf.io,samanehsan/osf.io,cslzchen/osf.io,chrisseto/osf.io,alexschiller/osf.io,abought/osf.io,caseyrygt/osf.io,icereval/osf.io,mluo613/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,icereval/osf.io,laurenrevere/osf.io,felliott/osf.io,acshi/osf.io,kwierman/osf.io,RomanZWang/osf.io,abought/osf.io,acshi/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,rdhyee/osf.io,crcresearch/osf.io,SSJohns/osf.io,emetsger/osf.io,asanfilippo7/osf.io,billyhunt/osf.io,emetsger/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,hmoco/osf.io,chrisseto/osf.io,amyshi188/osf.io,kch8qx/osf.io,emetsger/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,mluke93/osf.io,DanielSBrown/osf.io,emetsger/osf.io,caseyrollins/osf.io,cwisecarver/osf.io,GageGaskins/osf.io,asanfilippo7/osf.io,doublebits/osf.io,chennan47/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,acshi/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,caseyrollins/osf.io,baylee-d/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,kch8qx/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,TomBaxter/osf.io,mattclark/osf.io,cslzchen/osf.io,abought/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,amyshi188/osf.io,alexschiller/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,doublebits/osf.io,felliott/osf.io,laurenrevere/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,acshi/osf.io,mluo613/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,acshi/osf.io,pattisdr/osf.io,SSJohns/osf.io,mfraezz/osf.io,alexschiller/osf.io,danielneis/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,mluke93/osf.io,saradbowman/osf.io,mattclark/osf.io,wearpants/osf.io,leb2dg/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,sloria/osf.io,samanehsan/osf.io,caneruguz/osf.io,amyshi188/osf.io,KAsante95/osf.io,billyhunt/osf.io,cslzchen/osf.io,ZobairAlijan/osf.io,felliott/osf.io,zachjanicki/osf.io,aaxelb/osf.io,aaxelb/osf.io,hmoco/osf.io,sloria/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,cwisecarver/osf.io,kwierman/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,mattclark/osf.io,rdhyee/osf.io,chennan47/osf.io,brandonPurvis/osf.io,mfraezz/osf.io,GageGaskins/osf.io,jnayak1/osf.io,mluo613/osf.io,kwierman/osf.io,baylee-d/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,brianjgeiger/osf.io,mluo613/osf.io,sloria/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,Ghalko/osf.io,alexschiller/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,binoculars/osf.io,zamattiac/osf.io,adlius/osf.io,kch8qx/osf.io,wearpants/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,mfraezz/osf.io,icereval/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,danielneis/osf.io,samanehsan/osf.io,samchrisinger/osf.io,danielneis/osf.io,amyshi188/osf.io,hmoco/osf.io,doublebits/osf.io,aaxelb/osf.io,rdhyee/osf.io,kch8qx/osf.io,laurenrevere/o
sf.io,erinspace/osf.io,samchrisinger/osf.io,kwierman/osf.io,KAsante95/osf.io,adlius/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,felliott/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,chennan47/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,doublebits/osf.io,Nesiehr/osf.io,jnayak1/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,Johnetordoff/osf.io,zachjanicki/osf.io,leb2dg/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,caseyrygt/osf.io
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
+ url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
Add a second delete URL where users will send a request to confirm they want to bulk delete.
|
## Code Before:
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
## Instruction:
Add a second delete URL where users will send a request to confirm they want to bulk delete.
## Code After:
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
from django.conf.urls import url
from api.nodes import views
urlpatterns = [
# Examples:
# url(r'^$', 'api.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', views.NodeList.as_view(), name='node-list'),
+ url(r'^bulk_delete/(?P<confirmation_token>\w+)/$', views.NodeBulkDelete.as_view(), name='node-bulk-delete'),
url(r'^(?P<node_id>\w+)/$', views.NodeDetail.as_view(), name='node-detail'),
url(r'^(?P<node_id>\w+)/contributors/$', views.NodeContributorsList.as_view(), name='node-contributors'),
url(r'^(?P<node_id>\w+)/registrations/$', views.NodeRegistrationsList.as_view(), name='node-registrations'),
url(r'^(?P<node_id>\w+)/children/$', views.NodeChildrenList.as_view(), name='node-children'),
url(r'^(?P<node_id>\w+)/node_links/$', views.NodeLinksList.as_view(), name='node-pointers'),
url(r'^(?P<node_id>\w+)/files/$', views.NodeFilesList.as_view(), name='node-files'),
url(r'^(?P<node_id>\w+)/node_links/(?P<node_link_id>\w+)', views.NodeLinksDetail.as_view(), name='node-pointer-detail'),
]
|
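The diff above registers a named bulk-delete confirmation route. As a minimal sketch, assuming these urlpatterns are mounted at the project root, the route name could be resolved like this; the helper function and the token value are illustrative and not part of the record.

```python
# Hypothetical usage of the new route name; only 'node-bulk-delete' and the
# confirmation_token kwarg come from the diff above.
from django.core.urlresolvers import reverse  # on Django >= 2.0: from django.urls import reverse

def bulk_delete_url(confirmation_token):
    # Resolves to something like '/bulk_delete/abc123/' if mounted at the root.
    return reverse('node-bulk-delete', kwargs={'confirmation_token': confirmation_token})
```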
e5939631835ce04d808246fdc391c95354f3b044
|
slug/posix.py
|
slug/posix.py
|
import signal
from . import base
__all__ = ('Process',)
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
|
import signal
import selectors
from . import base
__all__ = ('Process', 'Valve')
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
class Valve(base.Valve):
"""
Forwards from one file-like to another, but this flow may be paused and
resumed.
This implementation doesn't support changing the target descriptors after
initialization.
"""
def _thread(self):
sel = selectors.DefaultSelector()
sel.register(self.side_in, selectors.EVENT_READ)
while True:
sel.select()
# Don't care about the event, there's only one thing it can be.
# This feels like there's a race condition in here, but I think the
# window is small enough we can call it "slight asyncronousity".
if not self.gate.is_set():
self.gate.wait()
continue
chunk = self.side_in.read(self.CHUNKSIZE)
if chunk == b'':
break
else:
self.side_out.write(chunk)
if not self.keepopen:
self.side_out.close()
|
Correct Valve behavior on Posix
|
Correct Valve behavior on Posix
|
Python
|
bsd-3-clause
|
xonsh/slug
|
import signal
+ import selectors
from . import base
- __all__ = ('Process',)
+ __all__ = ('Process', 'Valve')
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
+
+ class Valve(base.Valve):
+ """
+ Forwards from one file-like to another, but this flow may be paused and
+ resumed.
+
+ This implementation doesn't support changing the target descriptors after
+ initialization.
+ """
+ def _thread(self):
+ sel = selectors.DefaultSelector()
+ sel.register(self.side_in, selectors.EVENT_READ)
+ while True:
+ sel.select()
+ # Don't care about the event, there's only one thing it can be.
+
+ # This feels like there's a race condition in here, but I think the
+ # window is small enough we can call it "slight asyncronousity".
+ if not self.gate.is_set():
+ self.gate.wait()
+ continue
+
+ chunk = self.side_in.read(self.CHUNKSIZE)
+ if chunk == b'':
+ break
+ else:
+ self.side_out.write(chunk)
+ if not self.keepopen:
+ self.side_out.close()
+
|
Correct Valve behavior on Posix
|
## Code Before:
import signal
from . import base
__all__ = ('Process',)
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
## Instruction:
Correct Valve behavior on Posix
## Code After:
import signal
import selectors
from . import base
__all__ = ('Process', 'Valve')
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
class Valve(base.Valve):
"""
Forwards from one file-like to another, but this flow may be paused and
resumed.
This implementation doesn't support changing the target descriptors after
initialization.
"""
def _thread(self):
sel = selectors.DefaultSelector()
sel.register(self.side_in, selectors.EVENT_READ)
while True:
sel.select()
# Don't care about the event, there's only one thing it can be.
# This feels like there's a race condition in here, but I think the
# window is small enough we can call it "slight asyncronousity".
if not self.gate.is_set():
self.gate.wait()
continue
chunk = self.side_in.read(self.CHUNKSIZE)
if chunk == b'':
break
else:
self.side_out.write(chunk)
if not self.keepopen:
self.side_out.close()
|
import signal
+ import selectors
from . import base
- __all__ = ('Process',)
+ __all__ = ('Process', 'Valve')
? ++++++++
class Process(base.Process):
def pause(self):
"""
Pause the process, able to be continued later
"""
self.signal(signal.SIGSTOP)
def unpause(self):
# continue is a reserved word
"""
Continue the process after it's been paused
"""
self.signal(signal.SIGCONT)
+
+
+ class Valve(base.Valve):
+ """
+ Forwards from one file-like to another, but this flow may be paused and
+ resumed.
+
+ This implementation doesn't support changing the target descriptors after
+ initialization.
+ """
+ def _thread(self):
+ sel = selectors.DefaultSelector()
+ sel.register(self.side_in, selectors.EVENT_READ)
+ while True:
+ sel.select()
+ # Don't care about the event, there's only one thing it can be.
+
+ # This feels like there's a race condition in here, but I think the
+ # window is small enough we can call it "slight asyncronousity".
+ if not self.gate.is_set():
+ self.gate.wait()
+ continue
+
+ chunk = self.side_in.read(self.CHUNKSIZE)
+ if chunk == b'':
+ break
+ else:
+ self.side_out.write(chunk)
+ if not self.keepopen:
+ self.side_out.close()
|
d95d4da272915ad6a581260679df756bf24a6f4c
|
app/utils/db/__init__.py
|
app/utils/db/__init__.py
|
import logging
from app import db
logger = logging.getLogger(__name__)
def save_data(data):
try:
db.session.add(data)
db.session.commit()
except Exception as err:
logger.error(err)
|
import logging
from app import db
logger = logging.getLogger(__name__)
def save_record(record):
try:
db.session.add(record)
db.session.commit()
except Exception as err:
logger.error(err)
def delete_record(record):
try:
db.session.delete(record)
db.session.commit()
except Exception as err:
logger.error(err)
|
Rename save method for database to a more descriptive name
|
[FIX] Rename save method for database to a more descriptive name
|
Python
|
mit
|
brayoh/bucket-list-api
|
import logging
from app import db
logger = logging.getLogger(__name__)
- def save_data(data):
+ def save_record(record):
try:
- db.session.add(data)
+ db.session.add(record)
db.session.commit()
except Exception as err:
logger.error(err)
+ def delete_record(record):
+ try:
+ db.session.delete(record)
+ db.session.commit()
+ except Exception as err:
+ logger.error(err)
+
|
Rename save method for database to a more descriptive name
|
## Code Before:
import logging
from app import db
logger = logging.getLogger(__name__)
def save_data(data):
try:
db.session.add(data)
db.session.commit()
except Exception as err:
logger.error(err)
## Instruction:
Rename save method for database to a more descriptive name
## Code After:
import logging
from app import db
logger = logging.getLogger(__name__)
def save_record(record):
try:
db.session.add(record)
db.session.commit()
except Exception as err:
logger.error(err)
def delete_record(record):
try:
db.session.delete(record)
db.session.commit()
except Exception as err:
logger.error(err)
|
import logging
from app import db
logger = logging.getLogger(__name__)
- def save_data(data):
+ def save_record(record):
try:
- db.session.add(data)
? ---
+ db.session.add(record)
? +++++
db.session.commit()
except Exception as err:
logger.error(err)
+
+ def delete_record(record):
+ try:
+ db.session.delete(record)
+ db.session.commit()
+ except Exception as err:
+ logger.error(err)
|
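A hedged usage sketch for the renamed helpers; the BucketList model and its import path are assumptions for illustration and are not part of the record.

```python
# Assumed model import; the helpers come from the diff above.
from app.models import BucketList
from app.utils.db import save_record, delete_record

item = BucketList(name='Learn Flask')
save_record(item)      # session.add + commit, exceptions are logged
delete_record(item)    # session.delete + commit, exceptions are logged
```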
2a7d28573d1e4f07250da1d30209304fdb6de90d
|
sqlobject/tests/test_blob.py
|
sqlobject/tests/test_blob.py
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData()
prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image='string')
assert ImageData.selectBy(image='string').count() == 1
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
|
Use byte string for test
|
Tests(blob): Use byte string for test
|
Python
|
lgpl-2.1
|
sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
- prof = ImageData()
+ prof = ImageData(image=data)
- prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
- ImageData(image='string')
+ ImageData(image=b'string')
- assert ImageData.selectBy(image='string').count() == 1
+ assert ImageData.selectBy(image=b'string').count() == 1
|
Use byte string for test
|
## Code Before:
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData()
prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image='string')
assert ImageData.selectBy(image='string').count() == 1
## Instruction:
Use byte string for test
## Code After:
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
prof = ImageData(image=data)
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
ImageData(image=b'string')
assert ImageData.selectBy(image=b'string').count() == 1
|
import pytest
from sqlobject import BLOBCol, SQLObject
from sqlobject.compat import PY2
from sqlobject.tests.dbtest import setupClass, supports
########################################
# BLOB columns
########################################
class ImageData(SQLObject):
image = BLOBCol(default=b'emptydata', length=256)
def test_BLOBCol():
if not supports('blobData'):
pytest.skip("blobData isn't supported")
setupClass(ImageData)
if PY2:
data = ''.join([chr(x) for x in range(256)])
else:
data = bytes(range(256))
- prof = ImageData()
+ prof = ImageData(image=data)
? ++++++++++
- prof.image = data
iid = prof.id
ImageData._connection.cache.clear()
prof2 = ImageData.get(iid)
assert prof2.image == data
- ImageData(image='string')
+ ImageData(image=b'string')
? +
- assert ImageData.selectBy(image='string').count() == 1
+ assert ImageData.selectBy(image=b'string').count() == 1
? +
|
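A short illustration of why the test now uses byte strings: on Python 3 BLOB data round-trips as bytes, and bytes never compare equal to str, so the old `'string'` literals could not match.

```python
# Demonstrates the bytes/str mismatch the change avoids.
data = bytes(range(256))
print(len(data), data[:4])        # 256 b'\x00\x01\x02\x03'
print(b'string' == 'string')      # False on Python 3, hence b'string' in the test
```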
dfa92db8ba32a2209dacab04d9b14279f5f37f3d
|
core/scraper.py
|
core/scraper.py
|
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
if 'LEC' in component_and_section or 'LAB' in component_and_section:
component, section = component_and_section.split(' ')
block = {'component': component,
'section': section,
'enroll_cap': int(table_cells[6].get_text().rstrip()),
'enroll_total': int(table_cells[7].get_text().rstrip()),
'time': table_cells[10].get_text().rstrip(),
'room': table_cells[11].get_text().rstrip(),
'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
blocks.append(block)
return blocks
|
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
for ctype in ['LEC', 'LAB', 'SEM']:
if ctype in component_and_section:
component, section = component_and_section.split(' ')
block = {'component': component,
'section': section,
'enroll_cap': int(table_cells[6].get_text().rstrip()),
'enroll_total': int(table_cells[7].get_text().rstrip()),
'time': table_cells[10].get_text().rstrip(),
'room': table_cells[11].get_text().rstrip(),
'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
blocks.append(block)
break
return blocks
|
Support seminars in addition to lectures and labs
|
Support seminars in addition to lectures and labs
|
Python
|
mit
|
tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter,tuzhucheng/uw-course-alerter
|
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
- if 'LEC' in component_and_section or 'LAB' in component_and_section:
+ for ctype in ['LEC', 'LAB', 'SEM']:
+ if ctype in component_and_section:
- component, section = component_and_section.split(' ')
+ component, section = component_and_section.split(' ')
- block = {'component': component,
+ block = {'component': component,
- 'section': section,
+ 'section': section,
- 'enroll_cap': int(table_cells[6].get_text().rstrip()),
+ 'enroll_cap': int(table_cells[6].get_text().rstrip()),
- 'enroll_total': int(table_cells[7].get_text().rstrip()),
+ 'enroll_total': int(table_cells[7].get_text().rstrip()),
- 'time': table_cells[10].get_text().rstrip(),
+ 'time': table_cells[10].get_text().rstrip(),
- 'room': table_cells[11].get_text().rstrip(),
+ 'room': table_cells[11].get_text().rstrip(),
- 'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
+ 'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
- blocks.append(block)
+ blocks.append(block)
+ break
return blocks
|
Support seminars in addition to lectures and labs
|
## Code Before:
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
if 'LEC' in component_and_section or 'LAB' in component_and_section:
component, section = component_and_section.split(' ')
block = {'component': component,
'section': section,
'enroll_cap': int(table_cells[6].get_text().rstrip()),
'enroll_total': int(table_cells[7].get_text().rstrip()),
'time': table_cells[10].get_text().rstrip(),
'room': table_cells[11].get_text().rstrip(),
'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
blocks.append(block)
return blocks
## Instruction:
Support seminars in addition to lectures and labs
## Code After:
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
for ctype in ['LEC', 'LAB', 'SEM']:
if ctype in component_and_section:
component, section = component_and_section.split(' ')
block = {'component': component,
'section': section,
'enroll_cap': int(table_cells[6].get_text().rstrip()),
'enroll_total': int(table_cells[7].get_text().rstrip()),
'time': table_cells[10].get_text().rstrip(),
'room': table_cells[11].get_text().rstrip(),
'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
blocks.append(block)
break
return blocks
|
from bs4 import BeautifulSoup
def extract_blocks(page):
soup = BeautifulSoup(page)
table_rows = soup.find_all('tr')
blocks = []
for i, row in enumerate(table_rows[4:-2]):
table_cells = row.find_all('td')
if table_cells:
component_and_section = table_cells[1].get_text().rstrip()
- if 'LEC' in component_and_section or 'LAB' in component_and_section:
+ for ctype in ['LEC', 'LAB', 'SEM']:
+ if ctype in component_and_section:
- component, section = component_and_section.split(' ')
+ component, section = component_and_section.split(' ')
? ++++
- block = {'component': component,
+ block = {'component': component,
? ++++
- 'section': section,
+ 'section': section,
? ++++
- 'enroll_cap': int(table_cells[6].get_text().rstrip()),
+ 'enroll_cap': int(table_cells[6].get_text().rstrip()),
? ++++
- 'enroll_total': int(table_cells[7].get_text().rstrip()),
+ 'enroll_total': int(table_cells[7].get_text().rstrip()),
? ++++
- 'time': table_cells[10].get_text().rstrip(),
+ 'time': table_cells[10].get_text().rstrip(),
? ++++
- 'room': table_cells[11].get_text().rstrip(),
+ 'room': table_cells[11].get_text().rstrip(),
? ++++
- 'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
+ 'prof': table_cells[12].get_text().rstrip() if len(table_cells) > 12 else ''}
? ++++
- blocks.append(block)
+ blocks.append(block)
? ++++
+ break
return blocks
|
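A quick check of the new matching loop with made-up section strings; real input comes from the scraped table cells in the record above.

```python
# Only LEC, LAB and SEM rows are picked up; anything else is skipped as before.
samples = ['LEC 001', 'LAB 002', 'SEM 003', 'TUT 005']
for component_and_section in samples:
    for ctype in ['LEC', 'LAB', 'SEM']:
        if ctype in component_and_section:
            component, section = component_and_section.split(' ')
            print(component, section)   # LEC 001 / LAB 002 / SEM 003
            break
```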
c81cc838d6e8109020dafae7e4ed1ff5aa7ebb88
|
invoke/__init__.py
|
invoke/__init__.py
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
from .config import Config # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
Add Config to root convenience imports
|
Add Config to root convenience imports
|
Python
|
bsd-2-clause
|
pyinvoke/invoke,mattrobenolt/invoke,frol/invoke,mkusz/invoke,pfmoore/invoke,mkusz/invoke,pyinvoke/invoke,mattrobenolt/invoke,pfmoore/invoke,kejbaly2/invoke,kejbaly2/invoke,tyewang/invoke,frol/invoke,singingwolfboy/invoke
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
+ from .config import Config # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
Add Config to root convenience imports
|
## Code Before:
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
## Instruction:
Add Config to root convenience imports
## Code After:
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
from .config import Config # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
from ._version import __version_info__, __version__ # noqa
from .tasks import task, ctask, Task # noqa
from .collection import Collection # noqa
from .context import Context # noqa
+ from .config import Config # noqa
def run(command, **kwargs):
"""
Invoke ``command`` in a subprocess and return a `.Result` object.
This function is simply a convenience wrapper for creating an anonymous
`.Context` object and calling its `.Context.run` method, which lets you use
Invoke's powerful local command execution without requiring the rest of its
API.
"""
return Context().run(command, **kwargs)
|
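With this change, Config is importable straight from the package root alongside the other conveniences; a minimal check, assuming invoke is installed.

```python
# Root-level imports after the change above.
from invoke import Config, Context, task  # noqa

cfg = Config()                 # default configuration, no arguments required
print(type(cfg).__name__)      # Config
```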
3555b002aae386220bc02d662a9b188426afc08f
|
cmsplugin_facebook/cms_plugins.py
|
cmsplugin_facebook/cms_plugins.py
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
|
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
|
Python
|
bsd-3-clause
|
chrisglass/cmsplugin_facebook
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
+ module = 'Facebook'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
+ module = 'Facebook'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
|
## Code Before:
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
## Instruction:
Create a specific group for the Facebook plugins - makes it a bit neater in the list of plugins.
## Code After:
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
module = 'Facebook'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from cmsplugin_facebook import models
class BasePlugin(CMSPluginBase):
name = None
def render(self, context, instance, placeholder):
context.update({'instance': instance,
'name': self.name,
'url': instance.pageurl or \
context['request'].build_absolute_uri()})
return context
class FacebookLikeBoxPlugin(BasePlugin):
model = models.FacebookLikeBox
name = 'Facebook Like Box'
+ module = 'Facebook'
render_template = 'cmsplugin_facebook/likebox.html'
change_form_template = 'cmsplugin_facebook/likebox_change_form.html'
class FacebookLikeButtonPlugin(BasePlugin):
model = models.FacebookLikeButton
name = 'Facebook Like Button'
+ module = 'Facebook'
render_template = 'cmsplugin_facebook/likebutton.html'
change_form_template = 'cmsplugin_facebook/likebutton_change_form.html'
plugin_pool.register_plugin(FacebookLikeBoxPlugin)
plugin_pool.register_plugin(FacebookLikeButtonPlugin)
|

d29410b39af1165ba520e7ecad7e6e9c36a7fd2f
|
test/test_basic.py
|
test/test_basic.py
|
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker as Key
Key = Key()
def test_google_average():
result = searchcolor.google_average('Death', 10, Key.api(), Key.cse())
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
|
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker
from api_keys import BingKeyLocker
from api_keys import MSCSKeyLocker
GKL = GoogleKeyLocker()
BKL = BingKeyLocker()
MSCSKL = MSCSKeyLocker()
def test_google_average():
result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
def test_bing_average():
result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
def test_mscs_average():
result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
|
Add tests for bing and mscs
|
Add tests for bing and mscs
|
Python
|
mit
|
Tathorack/searchcolor,Tathorack/searchcolor
|
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
- from api_keys import GoogleKeyLocker as Key
+ from api_keys import GoogleKeyLocker
+ from api_keys import BingKeyLocker
+ from api_keys import MSCSKeyLocker
- Key = Key()
+ GKL = GoogleKeyLocker()
+ BKL = BingKeyLocker()
+ MSCSKL = MSCSKeyLocker()
def test_google_average():
- result = searchcolor.google_average('Death', 10, Key.api(), Key.cse())
+ result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
+ def test_bing_average():
+ result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8)
+ assert result.get('name') == 'Death'
+ assert result.get('red') >= 0 and result.get('red') <= 255
+ assert result.get('green') >= 0 and result.get('green') <= 255
+ assert result.get('blue') >= 0 and result.get('blue') <= 255
+
+ def test_mscs_average():
+ result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8)
+ assert result.get('name') == 'Death'
+ assert result.get('red') >= 0 and result.get('red') <= 255
+ assert result.get('green') >= 0 and result.get('green') <= 255
+ assert result.get('blue') >= 0 and result.get('blue') <= 255
+
|
Add tests for bing and mscs
|
## Code Before:
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker as Key
Key = Key()
def test_google_average():
result = searchcolor.google_average('Death', 10, Key.api(), Key.cse())
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
## Instruction:
Add tests for bing and mscs
## Code After:
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
from api_keys import GoogleKeyLocker
from api_keys import BingKeyLocker
from api_keys import MSCSKeyLocker
GKL = GoogleKeyLocker()
BKL = BingKeyLocker()
MSCSKL = MSCSKeyLocker()
def test_google_average():
result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
def test_bing_average():
result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
def test_mscs_average():
result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8)
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
|
import os
import sys
#installed
import pytest
#local
sys.path.append(os.path.split(os.path.split(__file__)[0])[0])
import searchcolor
- from api_keys import GoogleKeyLocker as Key
? -------
+ from api_keys import GoogleKeyLocker
+ from api_keys import BingKeyLocker
+ from api_keys import MSCSKeyLocker
- Key = Key()
+ GKL = GoogleKeyLocker()
+ BKL = BingKeyLocker()
+ MSCSKL = MSCSKeyLocker()
def test_google_average():
- result = searchcolor.google_average('Death', 10, Key.api(), Key.cse())
? ^^ ^^
+ result = searchcolor.google_average('Death', 10, GKL.api(), GKL.cse(), max_threads=8)
? + ^ + ^ +++++++++++++++
assert result.get('name') == 'Death'
assert result.get('red') >= 0 and result.get('red') <= 255
assert result.get('green') >= 0 and result.get('green') <= 255
assert result.get('blue') >= 0 and result.get('blue') <= 255
+
+ def test_bing_average():
+ result = searchcolor.bing_average('Death', 10, BKL.api(), max_threads=8)
+ assert result.get('name') == 'Death'
+ assert result.get('red') >= 0 and result.get('red') <= 255
+ assert result.get('green') >= 0 and result.get('green') <= 255
+ assert result.get('blue') >= 0 and result.get('blue') <= 255
+
+ def test_mscs_average():
+ result = searchcolor.mscs_average('Death', 10, MSCSKL.api(), max_threads=8)
+ assert result.get('name') == 'Death'
+ assert result.get('red') >= 0 and result.get('red') <= 255
+ assert result.get('green') >= 0 and result.get('green') <= 255
+ assert result.get('blue') >= 0 and result.get('blue') <= 255
|
ebc4acb745287762cc8cb0a18fb97ed3e01c9ab0
|
mkerefuse/util.py
|
mkerefuse/util.py
|
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
|
import json
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
|
Add json library for repr() calls
|
Add json library for repr() calls
|
Python
|
unlicense
|
tomislacker/python-mke-trash-pickup,tomislacker/python-mke-trash-pickup
|
+ import json
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
|
Add json library for repr() calls
|
## Code Before:
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
## Instruction:
Add json library for repr() calls
## Code After:
import json
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
|
+ import json
from lxml import html
class XPathObject(object):
input_properties = {}
"""Dict of keys (property names) and XPaths (to read vals from)"""
@classmethod
def FromHTML(cls, html_contents):
inst = cls()
print("Reading through {b} bytes for {c} properties...".format(
b=len(html_contents),
c=len(cls.input_properties)))
tree = html.fromstring(html_contents)
for attr_name, xpath in cls.input_properties.items():
print("Searching for '{n}': {x}".format(
n=attr_name,
x=xpath))
elements = tree.xpath(xpath)
if not len(elements):
print("Failed to find '{n}': {x}".format(
n=attr_name,
x=xpath))
continue
setattr(
inst,
attr_name,
elements[0].text)
return inst
def __repr__(self):
return json.dumps(
self.__dict__,
indent=4,
separators=(',', ': '))
|
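A stand-in illustration of the repr behaviour the json import enables; the Demo class is hypothetical (XPathObject itself also depends on lxml and scraped HTML).

```python
# Dumps an instance's __dict__ as indented JSON, mirroring XPathObject.__repr__.
import json

class Demo(object):
    def __init__(self):
        self.route = 'garbage'
        self.day = 'Tuesday'

    def __repr__(self):
        return json.dumps(self.__dict__, indent=4, separators=(',', ': '))

print(Demo())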
4a1976c6aa21f519825c527c795e60dffa7f46db
|
githubsetupircnotifications.py
|
githubsetupircnotifications.py
|
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
|
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
Print message if signing in failed
|
Print message if signing in failed
|
Python
|
mit
|
kragniz/github-setup-irc-notifications
|
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
+ if github is None:
+ print('Failed to sign into github')
+
|
Print message if signing in failed
|
## Code Before:
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
## Instruction:
Print message if signing in failed
## Code After:
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
|
import argparse
import getpass
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username'),
parser.add_argument('--password'),
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
+
+ if github is None:
+ print('Failed to sign into github')
|
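A sketch of acting on the new None check, for example exiting with a non-zero status; sys.exit and the stand-in value are assumptions layered on top of the record.

```python
# Illustrative failure handling around the None check added above.
import sys

github = None   # stand-in for github3.login(...) returning None on bad credentials
if github is None:
    print('Failed to sign into github')
    sys.exit(1)
```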
80cecb69170adf7235ecbff3eec4e737cf5d9292
|
impersonate/urls.py
|
impersonate/urls.py
|
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
|
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
|
Replace deprecated string view arguments to url
|
Replace deprecated string view arguments to url
|
Python
|
bsd-3-clause
|
Top20Talent/django-impersonate,Top20Talent/django-impersonate
|
- from django.conf.urls import patterns, url
+ from django.conf.urls import url
+
+ from .views import stop_impersonate, list_users, search_users, impersonate
- urlpatterns = patterns('impersonate.views',
+ urlpatterns = [
url(r'^stop/$',
- 'stop_impersonate',
+ stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
- 'list_users',
+ list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
- 'search_users',
+ search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
- 'impersonate',
+ impersonate,
name='impersonate-start'),
- )
+ ]
|
Replace deprecated string view arguments to url
|
## Code Before:
from django.conf.urls import patterns, url
urlpatterns = patterns('impersonate.views',
url(r'^stop/$',
'stop_impersonate',
name='impersonate-stop'),
url(r'^list/$',
'list_users',
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
'search_users',
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
'impersonate',
name='impersonate-start'),
)
## Instruction:
Replace deprecated string view arguments to url
## Code After:
from django.conf.urls import url
from .views import stop_impersonate, list_users, search_users, impersonate
urlpatterns = [
url(r'^stop/$',
stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
impersonate,
name='impersonate-start'),
]
|
- from django.conf.urls import patterns, url
? ----------
+ from django.conf.urls import url
+
+ from .views import stop_impersonate, list_users, search_users, impersonate
- urlpatterns = patterns('impersonate.views',
+ urlpatterns = [
url(r'^stop/$',
- 'stop_impersonate',
? - -
+ stop_impersonate,
name='impersonate-stop'),
url(r'^list/$',
- 'list_users',
? - -
+ list_users,
{'template': 'impersonate/list_users.html'},
name='impersonate-list'),
url(r'^search/$',
- 'search_users',
? - -
+ search_users,
{'template': 'impersonate/search_users.html'},
name='impersonate-search'),
url(r'^(?P<uid>.+)/$',
- 'impersonate',
? - -
+ impersonate,
name='impersonate-start'),
- )
+ ]
|
e03426b8fd696b8794e21ef52c76a0a5140e1463
|
Maths/fibonacciSeries.py
|
Maths/fibonacciSeries.py
|
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
|
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
|
Improve and Refactor the fibonacciSeries.py (Recursion)
|
Improve and Refactor the fibonacciSeries.py (Recursion)
|
Python
|
mit
|
TheAlgorithms/Python
|
def recur_fibo(n):
- if n <= 1:
- return n
- else:
- return(recur_fibo(n-1) + recur_fibo(n-2))
+ return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
- limit = int(input("How many terms to include in fibonacci series: "))
+ def isPositiveInteger(limit):
+ return limit >= 0
- if limit <= 0:
- print("Please enter a positive integer: ")
- else:
+ def main():
+ limit = int(input("How many terms to include in fibonacci series: "))
+ if isPositiveInteger:
- print(f"The first {limit} terms of the fibonacci series are as follows")
+ print(f"The first {limit} terms of the fibonacci series are as follows:")
- for i in range(limit):
- print(recur_fibo(i))
+ print([recur_fibo(n) for n in range(limit)])
+ else:
+ print("Please enter a positive integer: ")
+ if __name__ == '__main__':
+ main()
+
|
Improve and Refactor the fibonacciSeries.py (Recursion)
|
## Code Before:
def recur_fibo(n):
if n <= 1:
return n
else:
return(recur_fibo(n-1) + recur_fibo(n-2))
limit = int(input("How many terms to include in fibonacci series: "))
if limit <= 0:
print("Please enter a positive integer: ")
else:
print(f"The first {limit} terms of the fibonacci series are as follows")
for i in range(limit):
print(recur_fibo(i))
## Instruction:
Improve and Refactor the fibonacciSeries.py (Recursion)
## Code After:
def recur_fibo(n):
return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
def isPositiveInteger(limit):
return limit >= 0
def main():
limit = int(input("How many terms to include in fibonacci series: "))
if isPositiveInteger:
print(f"The first {limit} terms of the fibonacci series are as follows:")
print([recur_fibo(n) for n in range(limit)])
else:
print("Please enter a positive integer: ")
if __name__ == '__main__':
main()
|
def recur_fibo(n):
- if n <= 1:
- return n
- else:
- return(recur_fibo(n-1) + recur_fibo(n-2))
? ---
+ return n if n <= 1 else (recur_fibo(n-1) + recur_fibo(n-2))
? ++++++++++++++++++
- limit = int(input("How many terms to include in fibonacci series: "))
+ def isPositiveInteger(limit):
+ return limit >= 0
- if limit <= 0:
- print("Please enter a positive integer: ")
- else:
+ def main():
+ limit = int(input("How many terms to include in fibonacci series: "))
+ if isPositiveInteger:
- print(f"The first {limit} terms of the fibonacci series are as follows")
+ print(f"The first {limit} terms of the fibonacci series are as follows:")
? +++++ +
- for i in range(limit):
- print(recur_fibo(i))
+ print([recur_fibo(n) for n in range(limit)])
+ else:
+ print("Please enter a positive integer: ")
+
+ if __name__ == '__main__':
+ main()
|
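A quick sanity check of the refactored recursion. Note that in the refactored code above, `if isPositiveInteger:` tests the function object itself (which is always truthy) rather than calling `isPositiveInteger(limit)`, so the negative-input branch is never reached.

```python
# First ten terms from the one-line recursive definition; output in the comment.
def recur_fibo(n):
    return n if n <= 1 else (recur_fibo(n - 1) + recur_fibo(n - 2))

print([recur_fibo(n) for n in range(10)])   # [0, 1, 1, 2, 3, 5, 8, 13, 21, 34]
```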
bb88b1d2e2c4d3eb482c3cf32d1a53c9e89f94cf
|
conftest.py
|
conftest.py
|
from __future__ import unicode_literals
from django.db import connection
def pytest_report_header(config):
with connection.cursor() as cursor:
cursor.execute("SELECT VERSION()")
version = cursor.fetchone()[0]
return "MySQL version: {}".format(version)
|
from __future__ import unicode_literals
import django
from django.db import connection
def pytest_report_header(config):
dot_version = '.'.join(str(x) for x in django.VERSION)
header = "Django version: " + dot_version
if hasattr(connection, '_nodb_connection'):
with connection._nodb_connection.cursor() as cursor:
cursor.execute("SELECT VERSION()")
version = cursor.fetchone()[0]
header += "\nMySQL version: {}".format(version)
return header
|
Fix pytest version report when database does not exist, add Django version header
|
Fix pytest version report when database does not exist, add Django version header
|
Python
|
mit
|
nickmeharry/django-mysql,arnau126/django-mysql,arnau126/django-mysql,nickmeharry/django-mysql,adamchainz/django-mysql
|
from __future__ import unicode_literals
+ import django
from django.db import connection
def pytest_report_header(config):
+ dot_version = '.'.join(str(x) for x in django.VERSION)
+ header = "Django version: " + dot_version
- with connection.cursor() as cursor:
- cursor.execute("SELECT VERSION()")
- version = cursor.fetchone()[0]
- return "MySQL version: {}".format(version)
+ if hasattr(connection, '_nodb_connection'):
+ with connection._nodb_connection.cursor() as cursor:
+ cursor.execute("SELECT VERSION()")
+ version = cursor.fetchone()[0]
+ header += "\nMySQL version: {}".format(version)
+
+ return header
+
|
Fix pytest version report when database does not exist, add Django version header
|
## Code Before:
from __future__ import unicode_literals
from django.db import connection
def pytest_report_header(config):
with connection.cursor() as cursor:
cursor.execute("SELECT VERSION()")
version = cursor.fetchone()[0]
return "MySQL version: {}".format(version)
## Instruction:
Fix pytest version report when database does not exist, add Django version header
## Code After:
from __future__ import unicode_literals
import django
from django.db import connection
def pytest_report_header(config):
dot_version = '.'.join(str(x) for x in django.VERSION)
header = "Django version: " + dot_version
if hasattr(connection, '_nodb_connection'):
with connection._nodb_connection.cursor() as cursor:
cursor.execute("SELECT VERSION()")
version = cursor.fetchone()[0]
header += "\nMySQL version: {}".format(version)
return header
|
from __future__ import unicode_literals
+ import django
from django.db import connection
def pytest_report_header(config):
+ dot_version = '.'.join(str(x) for x in django.VERSION)
+ header = "Django version: " + dot_version
+
+ if hasattr(connection, '_nodb_connection'):
- with connection.cursor() as cursor:
+ with connection._nodb_connection.cursor() as cursor:
? ++++ +++++++++++++++++
- cursor.execute("SELECT VERSION()")
+ cursor.execute("SELECT VERSION()")
? ++++
- version = cursor.fetchone()[0]
+ version = cursor.fetchone()[0]
? ++++
- return "MySQL version: {}".format(version)
? ^^^^^
+ header += "\nMySQL version: {}".format(version)
? +++++++++ ^^^ ++
+
+ return header
|
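An illustrative rendering of the header string built above, with made-up values standing in for django.VERSION and the MySQL SELECT VERSION() result.

```python
# Builds the same two-line report header as the conftest hook above.
dot_version = '.'.join(str(x) for x in (1, 8, 4, 'final', 0))
header = "Django version: " + dot_version
header += "\nMySQL version: {}".format('5.6.27')
print(header)
# Django version: 1.8.4.final.0
# MySQL version: 5.6.27
```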
131cb9abd711cc71c558e5a89d5e2b8a28ae8517
|
tests/integration/test_gists.py
|
tests/integration/test_gists.py
|
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
Add docstrings to Gist integration tests
|
Add docstrings to Gist integration tests
@esacteksab would be so proud
|
Python
|
bsd-3-clause
|
krxsky/github3.py,balloob/github3.py,jim-minter/github3.py,ueg1990/github3.py,wbrefvem/github3.py,agamdua/github3.py,christophelec/github3.py,icio/github3.py,sigmavirus24/github3.py,itsmemattchung/github3.py,h4ck3rm1k3/github3.py,degustaf/github3.py
|
+ """Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
+
+ """Gist integration tests."""
+
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
+ """Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
+ """Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
Add docstrings to Gist integration tests
|
## Code Before:
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
## Instruction:
Add docstrings to Gist integration tests
## Code After:
"""Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
"""Gist integration tests."""
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
"""Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
"""Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
+ """Integration tests for methods implemented on Gist."""
from .helper import IntegrationHelper
import github3
class TestGist(IntegrationHelper):
+
+ """Gist integration tests."""
+
def test_comments(self):
"""Show that a user can iterate over the comments on a gist."""
cassette_name = self.cassette_name('comments')
with self.recorder.use_cassette(cassette_name):
gist = self.gh.gist(3342247)
assert gist is not None
for comment in gist.comments():
assert isinstance(comment, github3.gists.comment.GistComment)
def test_iter_commits(self):
+ """Show that a user can iterate over the commits in a gist."""
cassette_name = self.cassette_name('commits')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_commits():
assert isinstance(commit, github3.gists.history.GistHistory)
def test_iter_forks(self):
+ """Show that a user can iterate over the forks of a gist."""
cassette_name = self.cassette_name('forks')
with self.recorder.use_cassette(cassette_name,
preserve_exact_body_bytes=True):
gist = self.gh.gist(1834570)
assert gist is not None
for commit in gist.iter_forks():
assert isinstance(commit, github3.gists.gist.Gist)
|
d69b137bd19e0363173b120ff4f68becc6be7b3c
|
mama_cas/tests/backends.py
|
mama_cas/tests/backends.py
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
|
from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
|
Use get_user_model within test backend
|
Use get_user_model within test backend
|
Python
|
bsd-3-clause
|
orbitvu/django-mama-cas,harlov/django-mama-cas,forcityplatform/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,forcityplatform/django-mama-cas,jbittel/django-mama-cas,harlov/django-mama-cas
|
from django.contrib.auth.backends import ModelBackend
- from django.contrib.auth.models import User
+
+ from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
+ user_model = get_user_model()
try:
- user = User.objects.get(username__iexact=username)
+ user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
- except User.DoesNotExist:
+ except user_model.DoesNotExist:
return None
|
Use get_user_model within test backend
|
## Code Before:
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(username__iexact=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
## Instruction:
Use get_user_model within test backend
## Code After:
from django.contrib.auth.backends import ModelBackend
from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
user_model = get_user_model()
try:
user = user_model.objects.get(username__iexact=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
|
from django.contrib.auth.backends import ModelBackend
- from django.contrib.auth.models import User
+
+ from mama_cas.compat import get_user_model
class ExceptionBackend(ModelBackend):
"""Raise an exception on authentication for testing purposes."""
def authenticate(self, username=None, password=None):
raise Exception
class CaseInsensitiveBackend(ModelBackend):
"""A case-insenstitive authentication backend."""
def authenticate(self, username=None, password=None):
+ user_model = get_user_model()
try:
- user = User.objects.get(username__iexact=username)
? ^
+ user = user_model.objects.get(username__iexact=username)
? ^ ++++++
if user.check_password(password):
return user
- except User.DoesNotExist:
? ^
+ except user_model.DoesNotExist:
? ^ ++++++
return None
|
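For the backend to take effect Django has to be pointed at it; a minimal illustrative sketch (only the dotted path to CaseInsensitiveBackend comes from the file above, the rest is an assumed test-settings snippet):

# Hypothetical test settings: enable the case-insensitive backend.
AUTHENTICATION_BACKENDS = [
    'mama_cas.tests.backends.CaseInsensitiveBackend',
]

# With it active, authenticate() matches usernames regardless of case,
# provided the password is correct for the stored user.
from django.contrib.auth import authenticate
user = authenticate(username='ALICE', password='s3cret')  # matches a user saved as 'alice'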
6e04a5c4953ef3fde5f2f5b3ef4f7fd8b7e8437e
|
tests/test_server.py
|
tests/test_server.py
|
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
|
from rocketchat_API.rocketchat import RocketChat
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
def test_login_token(logged_rocket):
user_id = logged_rocket.headers["X-User-Id"]
auth_token = logged_rocket.headers["X-Auth-Token"]
another_rocket = RocketChat(user_id=user_id, auth_token=auth_token)
logged_user = another_rocket.me().json()
assert logged_user.get("_id") == user_id
|
Add a test to check that authentication using the token directly works
|
Add a test to check that authentication using the token directly works
|
Python
|
mit
|
jadolg/rocketchat_API
|
+ from rocketchat_API.rocketchat import RocketChat
+
+
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
+
+ def test_login_token(logged_rocket):
+ user_id = logged_rocket.headers["X-User-Id"]
+ auth_token = logged_rocket.headers["X-Auth-Token"]
+
+ another_rocket = RocketChat(user_id=user_id, auth_token=auth_token)
+ logged_user = another_rocket.me().json()
+
+ assert logged_user.get("_id") == user_id
+
|
Add a test to check that authentication using the token directly works
|
## Code Before:
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
## Instruction:
Add a test to check that authentication using the token directly works
## Code After:
from rocketchat_API.rocketchat import RocketChat
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
def test_login_token(logged_rocket):
user_id = logged_rocket.headers["X-User-Id"]
auth_token = logged_rocket.headers["X-Auth-Token"]
another_rocket = RocketChat(user_id=user_id, auth_token=auth_token)
logged_user = another_rocket.me().json()
assert logged_user.get("_id") == user_id
|
+ from rocketchat_API.rocketchat import RocketChat
+
+
def test_info(logged_rocket):
info = logged_rocket.info().json()
assert "info" in info
assert info.get("success")
def test_statistics(logged_rocket):
statistics = logged_rocket.statistics().json()
assert statistics.get("success")
def test_statistics_list(logged_rocket):
statistics_list = logged_rocket.statistics_list().json()
assert statistics_list.get("success")
def test_directory(logged_rocket):
directory = logged_rocket.directory(
query={"text": "rocket", "type": "users"}
).json()
assert directory.get("success")
def test_spotlight(logged_rocket):
spotlight = logged_rocket.spotlight(query="user1").json()
assert spotlight.get("success")
assert spotlight.get("users") is not None, "No users list found"
assert spotlight.get("rooms") is not None, "No rooms list found"
+
+
+ def test_login_token(logged_rocket):
+ user_id = logged_rocket.headers["X-User-Id"]
+ auth_token = logged_rocket.headers["X-Auth-Token"]
+
+ another_rocket = RocketChat(user_id=user_id, auth_token=auth_token)
+ logged_user = another_rocket.me().json()
+
+ assert logged_user.get("_id") == user_id
|
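The same token-based construction works outside the test suite; a hedged sketch, where the id and token are placeholders (in the test they are read from the X-User-Id and X-Auth-Token headers of an already authenticated client):

from rocketchat_API.rocketchat import RocketChat

rocket = RocketChat(user_id='someUserId', auth_token='someAuthToken')
me = rocket.me().json()
print(me.get('_id'))  # echoes the same user id back when the token is valid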
f8d793eef586f2097a9a80e79c497204d2f6ffa0
|
banner/models.py
|
banner/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
Link, help_text=_("Link to which this banner should redirect.")
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
Link, help_text=_("Link to which this banner should redirect."),
blank=True, null=True
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
|
Make link on Banner model nullable
|
Make link on Banner model nullable
|
Python
|
bsd-3-clause
|
praekelt/jmbo-banner,praekelt/jmbo-banner
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
- Link, help_text=_("Link to which this banner should redirect.")
+ Link, help_text=_("Link to which this banner should redirect."),
+ blank=True, null=True
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
|
Make link on Banner model nullable
|
## Code Before:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
Link, help_text=_("Link to which this banner should redirect.")
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
## Instruction:
Make link on Banner model nullable
## Code After:
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
Link, help_text=_("Link to which this banner should redirect."),
blank=True, null=True
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from jmbo.models import Image, ModelBase
from link.models import Link
from banner.styles import BANNER_STYLE_CLASSES
class Banner(ModelBase):
"""Base class for all banners"""
link = models.ForeignKey(
- Link, help_text=_("Link to which this banner should redirect.")
+ Link, help_text=_("Link to which this banner should redirect."),
? +
+ blank=True, null=True
)
background_image = models.OneToOneField(
Image, null=True, blank=True
)
style = models.CharField(choices=[(klass.__name__, klass.__name__) for klass in BANNER_STYLE_CLASSES], max_length=128)
class Button(models.Model):
"""Call to action handling"""
text = models.CharField(
max_length=60,
help_text=_("The text to be displayed as the button label")
)
link = models.ForeignKey(
Link, help_text=_("CTA link for this button"), null=True, blank=True
)
banner = models.ManyToManyField(to=Banner, related_name="buttons", null=True, blank=True, through="ButtonOrder")
class ButtonOrder(models.Model):
banner = models.ForeignKey(Banner)
button = models.ForeignKey(Button)
position = models.PositiveIntegerField(default=0)
class Meta(object):
ordering = ["position"]
|
9ff92d0a437e5af08fbf996ed0e3362cbd9cf2c9
|
tests/instrumentdb_test.py
|
tests/instrumentdb_test.py
|
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()))
self.assertTrue(os.path.exists(idb.focal_plane_db_file_name()))
self.assertTrue(os.path.exists(idb.detector_db_file_name()))
self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
|
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()),
'Path "{0}" not found'.format(idb.instrument_db_path()))
for file_name in (idb.focal_plane_db_file_name(),
idb.detector_db_file_name(),
idb.scanning_strategy_db_file_name()):
self.assertTrue(os.path.exists(file_name),
'File "{0}" not found'.format(file_name))
|
Print more helpful messages when tests fail
|
Print more helpful messages when tests fail
|
Python
|
mit
|
ziotom78/stripeline,ziotom78/stripeline
|
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
- self.assertTrue(os.path.exists(idb.instrument_db_path()))
+ self.assertTrue(os.path.exists(idb.instrument_db_path()),
+ 'Path "{0}" not found'.format(idb.instrument_db_path()))
- self.assertTrue(os.path.exists(idb.focal_plane_db_file_name()))
+ for file_name in (idb.focal_plane_db_file_name(),
+ idb.detector_db_file_name(),
+ idb.scanning_strategy_db_file_name()):
- self.assertTrue(os.path.exists(idb.detector_db_file_name()))
+ self.assertTrue(os.path.exists(file_name),
- self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
+ 'File "{0}" not found'.format(file_name))
|
Print more helpful messages when tests fail
|
## Code Before:
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()))
self.assertTrue(os.path.exists(idb.focal_plane_db_file_name()))
self.assertTrue(os.path.exists(idb.detector_db_file_name()))
self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
## Instruction:
Print more helpful messages when tests fail
## Code After:
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
self.assertTrue(os.path.exists(idb.instrument_db_path()),
'Path "{0}" not found'.format(idb.instrument_db_path()))
for file_name in (idb.focal_plane_db_file_name(),
idb.detector_db_file_name(),
idb.scanning_strategy_db_file_name()):
self.assertTrue(os.path.exists(file_name),
'File "{0}" not found'.format(file_name))
|
'Test the functions in the instrumentdb module.'
import os.path
import unittest as ut
import stripeline.instrumentdb as idb
class TestInstrumentDb(ut.TestCase):
def test_paths(self):
- self.assertTrue(os.path.exists(idb.instrument_db_path()))
? ^
+ self.assertTrue(os.path.exists(idb.instrument_db_path()),
? ^
+ 'Path "{0}" not found'.format(idb.instrument_db_path()))
- self.assertTrue(os.path.exists(idb.focal_plane_db_file_name()))
+ for file_name in (idb.focal_plane_db_file_name(),
+ idb.detector_db_file_name(),
+ idb.scanning_strategy_db_file_name()):
- self.assertTrue(os.path.exists(idb.detector_db_file_name()))
? ---------------- - ^^
+ self.assertTrue(os.path.exists(file_name),
? ++++ ^
- self.assertTrue(os.path.exists(idb.scanning_strategy_db_file_name()))
+ 'File "{0}" not found'.format(file_name))
|
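The improvement comes from assertTrue's optional message argument; a standalone demo of the difference, unrelated to stripeline itself (the path is made up and the test fails on purpose to show the message):

import os.path
import unittest

class MessageDemo(unittest.TestCase):
    def test_missing_file(self):
        missing = '/no/such/instrument.db'
        # Without the second argument the failure reads "False is not true";
        # with it, the report names the offending path, which is what the
        # change above does for every instrument database file.
        self.assertTrue(os.path.exists(missing),
                        'File "{0}" not found'.format(missing))

if __name__ == '__main__':
    unittest.main()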
a797f4862ccfdb84ff87f0f64a6abdc405823215
|
tests/app/na_celery/test_email_tasks.py
|
tests/app/na_celery/test_email_tasks.py
|
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email):
mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email')
send_emails(sample_email.id)
assert mock_send_email.call_args[0][0] == '[email protected]'
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
|
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member):
mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200)
send_emails(sample_email.id)
assert mock_send_email.call_args[0][0] == sample_member.email
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
|
Update email task test for members
|
Update email task test for members
|
Python
|
mit
|
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
|
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
- def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email):
+ def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member):
- mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email')
+ mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200)
send_emails(sample_email.id)
- assert mock_send_email.call_args[0][0] == '[email protected]'
+ assert mock_send_email.call_args[0][0] == sample_member.email
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
|
Update email task test for members
|
## Code Before:
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email):
mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email')
send_emails(sample_email.id)
assert mock_send_email.call_args[0][0] == '[email protected]'
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
## Instruction:
Update email task test for members
## Code After:
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member):
mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200)
send_emails(sample_email.id)
assert mock_send_email.call_args[0][0] == sample_member.email
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
|
from app.na_celery.email_tasks import send_emails
class WhenProcessingSendEmailsTask:
- def it_calls_send_email_to_task(self, mocker, db, db_session, sample_admin_user, sample_email):
? -- ^^^^^^ ^^^
+ def it_calls_send_email_to_task(self, mocker, db, db_session, sample_email, sample_member):
? ++ ^ + ^^^
- mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email')
+ mock_send_email = mocker.patch('app.na_celery.email_tasks.send_email', return_value=200)
? ++++++++++++++++++
send_emails(sample_email.id)
- assert mock_send_email.call_args[0][0] == '[email protected]'
? ^^^^^^^^^ ^^ ^
+ assert mock_send_email.call_args[0][0] == sample_member.email
? ^ +++++++ ^ ^^^
assert mock_send_email.call_args[0][1] == 'workshop: test title'
def it_sends_an_email_to_members_up_to_email_limit(self):
pass
def it_does_not_send_an_email_if_not_between_start_and_expiry(self):
pass
def it_sends_email_with_correct_template(self):
pass
|
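pytest-mock's mocker.patch is a thin wrapper around unittest.mock.patch, so the call_args indexing used in the test can be shown with a plain Mock; the address and subject below are made-up sample values:

from unittest import mock

send_email = mock.Mock(return_value=200)
send_email('[email protected]', 'workshop: test title')

assert send_email.call_args[0][0] == '[email protected]'       # first positional argument
assert send_email.call_args[0][1] == 'workshop: test title'  # second positional argument
assert send_email.return_value == 200                         # the stubbed return value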
65731a34e152d085f55893c65607b8fa25dcfd63
|
pathvalidate/_interface.py
|
pathvalidate/_interface.py
|
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
|
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
from .error import ValidationError
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
def is_valid(self, value):
try:
self.validate(value)
except (TypeError, ValidationError):
return False
return True
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
|
Add is_valid method for file sanitizer classes
|
Add is_valid method for file sanitizer classes
|
Python
|
mit
|
thombashi/pathvalidate
|
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
+ from .error import ValidationError
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
+ def is_valid(self, value):
+ try:
+ self.validate(value)
+ except (TypeError, ValidationError):
+ return False
+
+ return True
+
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
|
Add is_valid method for file sanitizer classes
|
## Code Before:
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
## Instruction:
Add is_valid method for file sanitizer classes
## Code After:
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
from .error import ValidationError
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
def is_valid(self, value):
try:
self.validate(value)
except (TypeError, ValidationError):
return False
return True
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
|
from __future__ import absolute_import, unicode_literals
import abc
from ._common import validate_null_string
from ._six import add_metaclass
+ from .error import ValidationError
@add_metaclass(abc.ABCMeta)
class NameSanitizer(object):
@abc.abstractproperty
def reserved_keywords(self): # pragma: no cover
pass
@abc.abstractmethod
def validate(self, value): # pragma: no cover
pass
+ def is_valid(self, value):
+ try:
+ self.validate(value)
+ except (TypeError, ValidationError):
+ return False
+
+ return True
+
@abc.abstractmethod
def sanitize(self, value, replacement_text=""): # pragma: no cover
pass
def _is_reserved_keyword(self, value):
return value in self.reserved_keywords
@staticmethod
def _validate_null_string(text):
validate_null_string(text, error_msg="null name")
|
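A minimal subclass sketch of how is_valid() turns a validate() failure into False; the DummySanitizer class, the import paths, and the plain-message ValidationError call are illustrative assumptions rather than pathvalidate's own API:

from pathvalidate._interface import NameSanitizer
from pathvalidate.error import ValidationError

class DummySanitizer(NameSanitizer):
    """Illustrative sanitizer that treats 'CON' as the only reserved name."""

    @property
    def reserved_keywords(self):
        return ["CON"]

    def validate(self, value):
        if self._is_reserved_keyword(value.upper()):
            raise ValidationError("reserved name")  # assumes a plain-message constructor

    def sanitize(self, value, replacement_text=""):
        return value

sanitizer = DummySanitizer()
print(sanitizer.is_valid("con"))     # False: validate() raised ValidationError
print(sanitizer.is_valid("report"))  # True: validate() returned normally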
936302bf5db057a01644014aabc1357f925c6afa
|
mezzanine/accounts/models.py
|
mezzanine/accounts/models.py
|
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
def create_profile(user_model, instance, created, **kwargs):
if created:
try:
get_profile_for_user(instance)
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
|
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
def create_profile(**kwargs):
if kwargs["created"]:
try:
get_profile_for_user(kwargs["instance"])
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
|
Fix user profile signal handler.
|
Fix user profile signal handler.
|
Python
|
bsd-2-clause
|
wbtuomela/mezzanine,Cicero-Zhao/mezzanine,gradel/mezzanine,christianwgd/mezzanine,eino-makitalo/mezzanine,frankier/mezzanine,jerivas/mezzanine,Cicero-Zhao/mezzanine,frankier/mezzanine,readevalprint/mezzanine,dsanders11/mezzanine,stephenmcd/mezzanine,frankier/mezzanine,eino-makitalo/mezzanine,ryneeverett/mezzanine,viaregio/mezzanine,wbtuomela/mezzanine,readevalprint/mezzanine,dsanders11/mezzanine,douglaskastle/mezzanine,ryneeverett/mezzanine,vladir/mezzanine,stephenmcd/mezzanine,douglaskastle/mezzanine,vladir/mezzanine,christianwgd/mezzanine,sjdines/mezzanine,sjuxax/mezzanine,jerivas/mezzanine,stephenmcd/mezzanine,sjdines/mezzanine,readevalprint/mezzanine,viaregio/mezzanine,gradel/mezzanine,eino-makitalo/mezzanine,gradel/mezzanine,spookylukey/mezzanine,ryneeverett/mezzanine,viaregio/mezzanine,sjuxax/mezzanine,vladir/mezzanine,molokov/mezzanine,dsanders11/mezzanine,jerivas/mezzanine,spookylukey/mezzanine,sjuxax/mezzanine,douglaskastle/mezzanine,christianwgd/mezzanine,molokov/mezzanine,sjdines/mezzanine,molokov/mezzanine,spookylukey/mezzanine,wbtuomela/mezzanine
|
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
- def create_profile(user_model, instance, created, **kwargs):
+ def create_profile(**kwargs):
- if created:
+ if kwargs["created"]:
try:
- get_profile_for_user(instance)
+ get_profile_for_user(kwargs["instance"])
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
|
Fix user profile signal handler.
|
## Code Before:
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
def create_profile(user_model, instance, created, **kwargs):
if created:
try:
get_profile_for_user(instance)
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
## Instruction:
Fix user profile signal handler.
## Code After:
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
def create_profile(**kwargs):
if kwargs["created"]:
try:
get_profile_for_user(kwargs["instance"])
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
|
from django.db import DatabaseError, connection
from django.db.models.signals import post_save
from mezzanine.accounts import get_profile_for_user
from mezzanine.conf import settings
__all__ = ()
if getattr(settings, "AUTH_PROFILE_MODULE", None):
- def create_profile(user_model, instance, created, **kwargs):
+ def create_profile(**kwargs):
- if created:
+ if kwargs["created"]:
? ++++++++ ++
try:
- get_profile_for_user(instance)
+ get_profile_for_user(kwargs["instance"])
? ++++++++ ++
except DatabaseError:
# User creation in initial syncdb may have been triggered,
# while profile model is under migration management and
# doesn't exist yet. We close the connection so that it
# gets re-opened, allowing syncdb to continue and complete.
connection.close()
post_save.connect(create_profile, sender=settings.AUTH_USER_MODEL, weak=False)
|
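The old signature broke because Django signals deliver every argument as a keyword; a standalone sketch of the difference, with made-up receiver names and values:

def old_receiver(user_model, instance, created, **kwargs):
    return created

def new_receiver(**kwargs):
    return kwargs["created"]

# Signal.send() calls each receiver with keyword arguments only, roughly:
call_kwargs = {"signal": None, "sender": "auth.User", "instance": object(), "created": True}

print(new_receiver(**call_kwargs))   # True: everything lands in **kwargs
# old_receiver(**call_kwargs)        # TypeError: no keyword named 'user_model' is sent,
#                                    # so that required parameter is never filled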
208081800ab7e6217ec0f88e76c2dffd32187db1
|
whyp/shell.py
|
whyp/shell.py
|
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
|
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
if not path_dir.isdir():
continue
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
|
Allow for missing directories in $PATH
|
Allow for missing directories in $PATH
|
Python
|
mit
|
jalanb/what,jalanb/what
|
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
+ if not path_dir.isdir():
+ continue
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
|
Allow for missing directories in $PATH
|
## Code Before:
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
## Instruction:
Allow for missing directories in $PATH
## Code After:
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
if not path_dir.isdir():
continue
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
|
import os
from pysyte.types.paths import path
def value(key):
"""A value from the shell environment, defaults to empty string
>>> value('SHELL') is not None
True
"""
try:
return os.environ[key]
except KeyError:
return ''
def paths(name=None):
"""A list of paths in the environment's PATH
>>> '/bin' in paths()
True
"""
path_value = value(name or 'PATH')
path_strings = path_value.split(':')
path_paths = [path(_) for _ in path_strings]
return path_paths
def path_commands():
"""Gives a dictionary of all executable files in the environment's PATH
>>> path_commands()['python'] == sys.executable or True
True
"""
commands = {}
for path_dir in paths():
+ if not path_dir.isdir():
+ continue
for file_path in path_dir.list_files():
if not file_path.isexec():
continue
if file_path.name in commands:
continue
commands[file_path.name] = file_path
return commands
_path_commands = path_commands()
def which(name):
"""Looks for the name as an executable is shell's PATH
If name is not found, look for name.exe
If still not found, return empty string
>>> which('python') == sys.executable or True
True
"""
try:
commands = _path_commands
return commands[name]
except KeyError:
if name.endswith('.exe'):
return ''
return which('%s.exe' % name)
def is_path_command(name):
return name in _path_commands
|
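Hypothetical usage of the helpers above; the import path is inferred from the file location and the output depends entirely on the local $PATH:

from whyp.shell import which, is_path_command, paths

print(paths()[:3])                          # first few directories from $PATH
print(which('python'))                      # path to the executable, or '' if absent
print(is_path_command('no-such-command'))   # False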
73a4aca6e9c0c4c9ef53e498319bf754c6bb8edb
|
rippl/rippl/urls.py
|
rippl/rippl/urls.py
|
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
|
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
|
Fix line length to pass CI
|
Fix line length to pass CI
|
Python
|
mit
|
gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl,gnmerritt/dailyrippl
|
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
+ url(
+ r'^mission_statement',
- url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
+ TemplateView.as_view(template_name='mission_statement.html'),
+ ),
url(r'^legislature/', include('legislature.urls')),
]
|
Fix line length to pass CI
|
## Code Before:
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
url(r'^legislature/', include('legislature.urls')),
]
## Instruction:
Fix line length to pass CI
## Code After:
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(
r'^mission_statement',
TemplateView.as_view(template_name='mission_statement.html'),
),
url(r'^legislature/', include('legislature.urls')),
]
|
"""rippl URL Configuration"""
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import TemplateView
from .registration.forms import RecaptchaRegView
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^accounts/register/$', RecaptchaRegView.as_view()),
url(r'^accounts/', include('registration.backends.simple.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
+ url(
+ r'^mission_statement',
- url(r'^mission_statement', TemplateView.as_view(template_name='mission_statement.html')),
? ^^^^^^^^^^^^^^^^^^^^^^^^^^ -
+ TemplateView.as_view(template_name='mission_statement.html'),
? ^^^
+ ),
url(r'^legislature/', include('legislature.urls')),
]
|
7d82f3accce0cf174fd7cf176d5c289ffc791647
|
ds_queue.py
|
ds_queue.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
queue = Queue()
print('Is empty: {}'.format(queue.is_empty()))
print('Enqueue "dog", 4 & 8.4')
queue.enqueue('dog')
queue.enqueue(4)
queue.enqueue(8.4)
print('Is empty: {}'.format(queue.is_empty()))
print('Queue size: {}'.format(queue.size()))
print('Dequeue: {}'.format(queue.dequeue()))
print('Is empty: {}'.format(queue.is_empty()))
print('Queue size: {}'.format(queue.size()))
print('Show: {}'.format(queue.show()))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
def peek(self):
return self.items[-1]
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
q = Queue()
print('Is empty: {}'.format(q.is_empty()))
print('Enqueue "dog", 4 & 8.4')
q.enqueue('dog')
q.enqueue(4)
q.enqueue(8.4)
print(q.peek())
print('Is empty: {}'.format(q.is_empty()))
print('Queue size: {}'.format(q.size()))
print('Dequeue: {}'.format(q.dequeue()))
print('Is empty: {}'.format(q.is_empty()))
print('Queue size: {}'.format(q.size()))
print('Show: {}'.format(q.show()))
if __name__ == '__main__':
main()
|
Revise Queue instance to q
|
Revise Queue instance to q
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
+
+ def peek(self):
+ return self.items[-1]
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
- queue = Queue()
+ q = Queue()
- print('Is empty: {}'.format(queue.is_empty()))
+ print('Is empty: {}'.format(q.is_empty()))
print('Enqueue "dog", 4 & 8.4')
- queue.enqueue('dog')
+ q.enqueue('dog')
- queue.enqueue(4)
+ q.enqueue(4)
- queue.enqueue(8.4)
+ q.enqueue(8.4)
+ print(q.peek())
- print('Is empty: {}'.format(queue.is_empty()))
+ print('Is empty: {}'.format(q.is_empty()))
- print('Queue size: {}'.format(queue.size()))
+ print('Queue size: {}'.format(q.size()))
- print('Dequeue: {}'.format(queue.dequeue()))
+ print('Dequeue: {}'.format(q.dequeue()))
- print('Is empty: {}'.format(queue.is_empty()))
+ print('Is empty: {}'.format(q.is_empty()))
- print('Queue size: {}'.format(queue.size()))
+ print('Queue size: {}'.format(q.size()))
- print('Show: {}'.format(queue.show()))
+ print('Show: {}'.format(q.show()))
+
if __name__ == '__main__':
main()
|
Revise Queue instance to q
|
## Code Before:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
queue = Queue()
print('Is empty: {}'.format(queue.is_empty()))
print('Enqueue "dog", 4 & 8.4')
queue.enqueue('dog')
queue.enqueue(4)
queue.enqueue(8.4)
print('Is empty: {}'.format(queue.is_empty()))
print('Queue size: {}'.format(queue.size()))
print('Dequeue: {}'.format(queue.dequeue()))
print('Is empty: {}'.format(queue.is_empty()))
print('Queue size: {}'.format(queue.size()))
print('Show: {}'.format(queue.show()))
if __name__ == '__main__':
main()
## Instruction:
Revise Queue instance to q
## Code After:
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
def peek(self):
return self.items[-1]
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
q = Queue()
print('Is empty: {}'.format(q.is_empty()))
print('Enqueue "dog", 4 & 8.4')
q.enqueue('dog')
q.enqueue(4)
q.enqueue(8.4)
print(q.peek())
print('Is empty: {}'.format(q.is_empty()))
print('Queue size: {}'.format(q.size()))
print('Dequeue: {}'.format(q.dequeue()))
print('Is empty: {}'.format(q.is_empty()))
print('Queue size: {}'.format(q.size()))
print('Show: {}'.format(q.show()))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
class Queue(object):
"""Queue class."""
def __init__(self):
self.items = []
def is_empty(self):
return self.items == []
+
+ def peek(self):
+ return self.items[-1]
def enqueue(self, item):
self.items.insert(0, item)
def dequeue(self):
return self.items.pop()
def size(self):
return len(self.items)
def show(self):
return self.items
def main():
- queue = Queue()
? ----
+ q = Queue()
- print('Is empty: {}'.format(queue.is_empty()))
? ----
+ print('Is empty: {}'.format(q.is_empty()))
print('Enqueue "dog", 4 & 8.4')
- queue.enqueue('dog')
? ----
+ q.enqueue('dog')
- queue.enqueue(4)
? ----
+ q.enqueue(4)
- queue.enqueue(8.4)
? ----
+ q.enqueue(8.4)
+ print(q.peek())
- print('Is empty: {}'.format(queue.is_empty()))
? ----
+ print('Is empty: {}'.format(q.is_empty()))
- print('Queue size: {}'.format(queue.size()))
? ----
+ print('Queue size: {}'.format(q.size()))
- print('Dequeue: {}'.format(queue.dequeue()))
? ----
+ print('Dequeue: {}'.format(q.dequeue()))
- print('Is empty: {}'.format(queue.is_empty()))
? ----
+ print('Is empty: {}'.format(q.is_empty()))
- print('Queue size: {}'.format(queue.size()))
? ----
+ print('Queue size: {}'.format(q.size()))
- print('Show: {}'.format(queue.show()))
? ----
+ print('Show: {}'.format(q.show()))
+
if __name__ == '__main__':
main()
|
30f259dbd1c5c9963a5a75855188e4f668626fb7
|
test/test_Spectrum.py
|
test/test_Spectrum.py
|
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
|
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.integers()), st.lists(st.floats()), st.lists(st.floats()))
def test_spectrum_assigns_hypothesis_data(x, y, z):
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
|
Add hypothesis test to test assignment
|
Add hypothesis test to test assignment
|
Python
|
mit
|
jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload
|
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
+ from hypothesis import given
+ import hypothesis.strategies as st
+
+ @given(st.lists(st.integers()), st.lists(st.floats()), st.lists(st.floats()))
+ def test_spectrum_assigns_hypothesis_data(x, y, z):
+
+ spec = Spectrum.Spectrum(x, y, z)
+ assert spec.flux == y
+ assert spec.pixel == x
+ assert spec.wavelength == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
+
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
|
Add hypothesis test to test assignment
|
## Code Before:
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
## Instruction:
Add hypothesis test to test assignment
## Code After:
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
from hypothesis import given
import hypothesis.strategies as st
@given(st.lists(st.integers()), st.lists(st.floats()), st.lists(st.floats()))
def test_spectrum_assigns_hypothesis_data(x, y, z):
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
|
from __future__ import division, print_function
import pytest
import sys
# Add Spectrum location to path
sys.path.append('../')
import Spectrum
# Test using hypothesis
+ from hypothesis import given
+ import hypothesis.strategies as st
+
+ @given(st.lists(st.integers()), st.lists(st.floats()), st.lists(st.floats()))
+ def test_spectrum_assigns_hypothesis_data(x, y, z):
+
+ spec = Spectrum.Spectrum(x, y, z)
+ assert spec.flux == y
+ assert spec.pixel == x
+ assert spec.wavelength == z
def test_spectrum_assigns_data():
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
z = 2200*x
+
spec = Spectrum.Spectrum(x, y, z)
assert spec.flux == y
assert spec.pixel == x
assert spec.wavelength == z
|
a0907ff742c81b676f602d1e17d820152f95d22e
|
django_docs/urls.py
|
django_docs/urls.py
|
from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
|
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
|
Add validation URL for Google Webmaster Tools.
|
Add validation URL for Google Webmaster Tools.
|
Python
|
bsd-3-clause
|
hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,rmoorman/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,rmoorman/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,nanuxbe/django,hassanabidpk/djangoproject.com,khkaminska/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,nanuxbe/django,alawnchen/djangoproject.com,rmoorman/djangoproject.com,vxvinh1511/djangoproject.com,gnarf/djangoproject.com,django/djangoproject.com,nanuxbe/django,xavierdutreilh/djangoproject.com,django/djangoproject.com
|
from django.conf.urls import patterns, url, include
+ from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
- urlpatterns = patterns('',
+ urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
- ) + docs_urlpatterns
+ url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
+ )
|
Add validation URL for Google Webmaster Tools.
|
## Code Before:
from django.conf.urls import patterns, url, include
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
) + docs_urlpatterns
## Instruction:
Add validation URL for Google Webmaster Tools.
## Code After:
from django.conf.urls import patterns, url, include
from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
)
|
from django.conf.urls import patterns, url, include
+ from django.http import HttpResponse
from docs.sitemaps import DocsSitemap
from docs.urls import urlpatterns as docs_urlpatterns
sitemaps = {'docs': DocsSitemap}
- urlpatterns = patterns('',
+ urlpatterns = docs_urlpatterns + patterns('',
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}),
- ) + docs_urlpatterns
+ url(r'^google79eabba6bf6fd6d3\.html$', lambda req: HttpResponse('google-site-verification: google79eabba6bf6fd6d3.html')),
+ )
|
f50efcb65d794985185f5cc82c697673f50e4c47
|
synapse/replication/slave/storage/keys.py
|
synapse/replication/slave/storage/keys.py
|
from synapse.storage import DataStore
from synapse.storage.keys import KeyStore
from ._base import BaseSlavedStore, __func__
class SlavedKeyStore(BaseSlavedStore):
_get_server_verify_key = KeyStore.__dict__[
"_get_server_verify_key"
]
get_server_verify_keys = __func__(DataStore.get_server_verify_keys)
store_server_verify_key = __func__(DataStore.store_server_verify_key)
get_server_keys_json = __func__(DataStore.get_server_keys_json)
store_server_keys_json = __func__(DataStore.store_server_keys_json)
|
from synapse.storage import KeyStore
# KeyStore isn't really safe to use from a worker, but for now we do so and hope that
# the races it creates aren't too bad.
SlavedKeyStore = KeyStore
|
Replace SlavedKeyStore with a shim
|
Replace SlavedKeyStore with a shim
since we're pulling everything out of KeyStore anyway, we may as well simplify
it.
|
Python
|
apache-2.0
|
matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse
|
- from synapse.storage import DataStore
- from synapse.storage.keys import KeyStore
+ from synapse.storage import KeyStore
- from ._base import BaseSlavedStore, __func__
+ # KeyStore isn't really safe to use from a worker, but for now we do so and hope that
+ # the races it creates aren't too bad.
+ SlavedKeyStore = KeyStore
- class SlavedKeyStore(BaseSlavedStore):
- _get_server_verify_key = KeyStore.__dict__[
- "_get_server_verify_key"
- ]
-
- get_server_verify_keys = __func__(DataStore.get_server_verify_keys)
- store_server_verify_key = __func__(DataStore.store_server_verify_key)
-
- get_server_keys_json = __func__(DataStore.get_server_keys_json)
- store_server_keys_json = __func__(DataStore.store_server_keys_json)
-
|
Replace SlavedKeyStore with a shim
|
## Code Before:
from synapse.storage import DataStore
from synapse.storage.keys import KeyStore
from ._base import BaseSlavedStore, __func__
class SlavedKeyStore(BaseSlavedStore):
_get_server_verify_key = KeyStore.__dict__[
"_get_server_verify_key"
]
get_server_verify_keys = __func__(DataStore.get_server_verify_keys)
store_server_verify_key = __func__(DataStore.store_server_verify_key)
get_server_keys_json = __func__(DataStore.get_server_keys_json)
store_server_keys_json = __func__(DataStore.store_server_keys_json)
## Instruction:
Replace SlavedKeyStore with a shim
## Code After:
from synapse.storage import KeyStore
# KeyStore isn't really safe to use from a worker, but for now we do so and hope that
# the races it creates aren't too bad.
SlavedKeyStore = KeyStore
|
- from synapse.storage import DataStore
- from synapse.storage.keys import KeyStore
? -----
+ from synapse.storage import KeyStore
- from ._base import BaseSlavedStore, __func__
+ # KeyStore isn't really safe to use from a worker, but for now we do so and hope that
+ # the races it creates aren't too bad.
+ SlavedKeyStore = KeyStore
-
- class SlavedKeyStore(BaseSlavedStore):
- _get_server_verify_key = KeyStore.__dict__[
- "_get_server_verify_key"
- ]
-
- get_server_verify_keys = __func__(DataStore.get_server_verify_keys)
- store_server_verify_key = __func__(DataStore.store_server_verify_key)
-
- get_server_keys_json = __func__(DataStore.get_server_keys_json)
- store_server_keys_json = __func__(DataStore.store_server_keys_json)
|
4601937752f707110d303e403153cc4412bcde58
|
oshino/util.py
|
oshino/util.py
|
from time import time
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
return int(time() * 1000)
|
from datetime import datetime
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
utcnow = datetime.utcnow()
return int(utcnow.timestamp() * 1000)
|
Use UTC timestamp as timestamp
|
Use UTC timestamp as timestamp
|
Python
|
mit
|
CodersOfTheNight/oshino
|
- from time import time
+ from datetime import datetime
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
+ utcnow = datetime.utcnow()
- return int(time() * 1000)
+ return int(utcnow.timestamp() * 1000)
|
Use UTC timestamp as timestamp
|
## Code Before:
from time import time
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
return int(time() * 1000)
## Instruction:
Use UTC timestamp as timestamp
## Code After:
from datetime import datetime
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
utcnow = datetime.utcnow()
return int(utcnow.timestamp() * 1000)
|
- from time import time
+ from datetime import datetime
? ++++ ++++
def dynamic_import(path):
module, builder = path.rsplit(".", 1)
return getattr(__import__(module, fromlist=[builder]), builder)
def current_ts():
"""
Just gives current timestamp.
"""
+ utcnow = datetime.utcnow()
- return int(time() * 1000)
+ return int(utcnow.timestamp() * 1000)
? +++++++ +++++
|
800ffecbed76f306806642546ed949153c8414c3
|
astropy/vo/samp/tests/test_hub_proxy.py
|
astropy/vo/samp/tests/test_hub_proxy.py
|
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
|
import os
import tempfile
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
fileobj, self.lockfile = tempfile.mkstemp()
self.hub = SAMPHubServer(web_profile=False,
lockfile=self.lockfile)
self.hub.start()
os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
del os.environ['SAMP_HUB'] # hacky
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
if os.path.exists(self.lockfile):
os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
|
Use temporary SAMP lock file
|
Use temporary SAMP lock file
|
Python
|
bsd-3-clause
|
saimn/astropy,DougBurke/astropy,joergdietrich/astropy,AustereCuriosity/astropy,lpsinger/astropy,lpsinger/astropy,tbabej/astropy,kelle/astropy,tbabej/astropy,mhvk/astropy,kelle/astropy,dhomeier/astropy,larrybradley/astropy,DougBurke/astropy,larrybradley/astropy,joergdietrich/astropy,dhomeier/astropy,stargaser/astropy,astropy/astropy,StuartLittlefair/astropy,larrybradley/astropy,tbabej/astropy,aleksandr-bakanov/astropy,kelle/astropy,astropy/astropy,AustereCuriosity/astropy,funbaker/astropy,stargaser/astropy,pllim/astropy,funbaker/astropy,funbaker/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,astropy/astropy,bsipocz/astropy,bsipocz/astropy,StuartLittlefair/astropy,dhomeier/astropy,MSeifert04/astropy,MSeifert04/astropy,DougBurke/astropy,aleksandr-bakanov/astropy,mhvk/astropy,mhvk/astropy,kelle/astropy,kelle/astropy,saimn/astropy,dhomeier/astropy,AustereCuriosity/astropy,tbabej/astropy,stargaser/astropy,saimn/astropy,StuartLittlefair/astropy,larrybradley/astropy,joergdietrich/astropy,pllim/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,AustereCuriosity/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,pllim/astropy,saimn/astropy,pllim/astropy,DougBurke/astropy,saimn/astropy,lpsinger/astropy,larrybradley/astropy,lpsinger/astropy,funbaker/astropy,AustereCuriosity/astropy,mhvk/astropy,bsipocz/astropy,MSeifert04/astropy,tbabej/astropy,bsipocz/astropy,joergdietrich/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,astropy/astropy,stargaser/astropy,astropy/astropy
|
+ import os
+ import tempfile
+
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
+ fileobj, self.lockfile = tempfile.mkstemp()
+
- self.hub = SAMPHubServer(web_profile=False)
+ self.hub = SAMPHubServer(web_profile=False,
+ lockfile=self.lockfile)
self.hub.start()
+
+ os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
+
+ del os.environ['SAMP_HUB'] # hacky
+
if self.proxy.is_connected:
self.proxy.disconnect()
+
self.hub.stop()
+
+ if os.path.exists(self.lockfile):
+ os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
|
Use temporary SAMP lock file
|
## Code Before:
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
## Instruction:
Use temporary SAMP lock file
## Code After:
import os
import tempfile
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
fileobj, self.lockfile = tempfile.mkstemp()
self.hub = SAMPHubServer(web_profile=False,
lockfile=self.lockfile)
self.hub.start()
os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
del os.environ['SAMP_HUB'] # hacky
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
if os.path.exists(self.lockfile):
os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
|
+ import os
+ import tempfile
+
from ..hub_proxy import SAMPHubProxy
from ..hub import SAMPHubServer
from ..client import SAMPClient
class TestHubProxy(object):
def setup_method(self, method):
+ fileobj, self.lockfile = tempfile.mkstemp()
+
- self.hub = SAMPHubServer(web_profile=False)
? ^
+ self.hub = SAMPHubServer(web_profile=False,
? ^
+ lockfile=self.lockfile)
self.hub.start()
+
+ os.environ['SAMP_HUB'] = "std-lockurl:file://" + os.path.abspath(self.lockfile)
self.proxy = SAMPHubProxy()
self.proxy.connect()
def teardown_method(self, method):
+
+ del os.environ['SAMP_HUB'] # hacky
+
if self.proxy.is_connected:
self.proxy.disconnect()
+
self.hub.stop()
+
+ if os.path.exists(self.lockfile):
+ os.remove(self.lockfile)
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_get_running_hubs(self):
SAMPHubProxy.get_running_hubs()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
|
b7acc8ca9c6c41aff7ffb419125f54d21da09652
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
)
|
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
install_requires=['ply']
)
|
Add dependency on ply package
|
Add dependency on ply package
|
Python
|
apache-2.0
|
sb98052/plyprotobuf
|
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
+ install_requires=['ply']
)
|
Add dependency on ply package
|
## Code Before:
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
)
## Instruction:
Add dependency on ply package
## Code After:
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
install_requires=['ply']
)
|
from distutils.core import setup
setup(name='plyprotobuf',
version='1.0',
description='Protobuf Parsing Library that uses ply',
author='Dusan Klinec',
url='https://github.com/sb98052/plyprotobuf',
packages=['plyproto'],
+ install_requires=['ply']
)
|
1bc61edde0e41ec3f2fe66758654b55ed51ec36a
|
test/test_repo.py
|
test/test_repo.py
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
def test_repo(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "https://github.com/spacetelescope/asv.git"
r = repo.get_repo(conf)
r.checkout("master")
r.checkout("gh-pages")
r.checkout("master")
hashes = r.get_hashes_from_range("ae0c27b65741..e6f382a704f7")
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
def _test_generic_repo(conf,
hash_range="ae0c27b65741..e6f382a704f7",
master="master",
branch="gh-pages"):
r = repo.get_repo(conf)
r.checkout(master)
r.checkout(branch)
r.checkout(master)
hashes = r.get_hashes_from_range(hash_range)
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
def test_repo_git(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "https://github.com/spacetelescope/asv.git"
_test_generic_repo(conf)
def test_repo_hg(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "hg+https://bitbucket.org/nds-org/nds-labs"
_test_generic_repo(conf, hash_range="a8ca24ac6b77:9dc758deba8",
master="tip", branch="dev")
|
Add test for mercurial repo
|
Add test for mercurial repo
|
Python
|
bsd-3-clause
|
pv/asv,waylonflinn/asv,airspeed-velocity/asv,pv/asv,qwhelan/asv,mdboom/asv,waylonflinn/asv,waylonflinn/asv,ericdill/asv,giltis/asv,ericdill/asv,airspeed-velocity/asv,mdboom/asv,qwhelan/asv,giltis/asv,airspeed-velocity/asv,qwhelan/asv,edisongustavo/asv,mdboom/asv,spacetelescope/asv,edisongustavo/asv,ericdill/asv,pv/asv,ericdill/asv,giltis/asv,spacetelescope/asv,spacetelescope/asv,mdboom/asv,qwhelan/asv,pv/asv,edisongustavo/asv,spacetelescope/asv,airspeed-velocity/asv
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
+ def _test_generic_repo(conf,
+ hash_range="ae0c27b65741..e6f382a704f7",
+ master="master",
+ branch="gh-pages"):
- def test_repo(tmpdir):
- conf = config.Config()
-
- conf.project = six.text_type(tmpdir.join("repo"))
- conf.repo = "https://github.com/spacetelescope/asv.git"
r = repo.get_repo(conf)
- r.checkout("master")
+ r.checkout(master)
- r.checkout("gh-pages")
+ r.checkout(branch)
- r.checkout("master")
+ r.checkout(master)
- hashes = r.get_hashes_from_range("ae0c27b65741..e6f382a704f7")
+ hashes = r.get_hashes_from_range(hash_range)
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
+
+ def test_repo_git(tmpdir):
+ conf = config.Config()
+
+ conf.project = six.text_type(tmpdir.join("repo"))
+ conf.repo = "https://github.com/spacetelescope/asv.git"
+ _test_generic_repo(conf)
+
+
+ def test_repo_hg(tmpdir):
+ conf = config.Config()
+
+ conf.project = six.text_type(tmpdir.join("repo"))
+ conf.repo = "hg+https://bitbucket.org/nds-org/nds-labs"
+ _test_generic_repo(conf, hash_range="a8ca24ac6b77:9dc758deba8",
+ master="tip", branch="dev")
+
|
Add test for mercurial repo
|
## Code Before:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
def test_repo(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "https://github.com/spacetelescope/asv.git"
r = repo.get_repo(conf)
r.checkout("master")
r.checkout("gh-pages")
r.checkout("master")
hashes = r.get_hashes_from_range("ae0c27b65741..e6f382a704f7")
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
## Instruction:
Add test for mercurial repo
## Code After:
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
def _test_generic_repo(conf,
hash_range="ae0c27b65741..e6f382a704f7",
master="master",
branch="gh-pages"):
r = repo.get_repo(conf)
r.checkout(master)
r.checkout(branch)
r.checkout(master)
hashes = r.get_hashes_from_range(hash_range)
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
def test_repo_git(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "https://github.com/spacetelescope/asv.git"
_test_generic_repo(conf)
def test_repo_hg(tmpdir):
conf = config.Config()
conf.project = six.text_type(tmpdir.join("repo"))
conf.repo = "hg+https://bitbucket.org/nds-org/nds-labs"
_test_generic_repo(conf, hash_range="a8ca24ac6b77:9dc758deba8",
master="tip", branch="dev")
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from asv import config
from asv import repo
+ def _test_generic_repo(conf,
+ hash_range="ae0c27b65741..e6f382a704f7",
+ master="master",
+ branch="gh-pages"):
- def test_repo(tmpdir):
- conf = config.Config()
-
- conf.project = six.text_type(tmpdir.join("repo"))
- conf.repo = "https://github.com/spacetelescope/asv.git"
r = repo.get_repo(conf)
- r.checkout("master")
? - -
+ r.checkout(master)
- r.checkout("gh-pages")
+ r.checkout(branch)
- r.checkout("master")
? - -
+ r.checkout(master)
- hashes = r.get_hashes_from_range("ae0c27b65741..e6f382a704f7")
+ hashes = r.get_hashes_from_range(hash_range)
assert len(hashes) == 4
dates = [r.get_date(hash) for hash in hashes]
assert dates == sorted(dates)[::-1]
tags = r.get_tags()
for tag in tags:
r.get_date_from_tag(tag)
+
+
+ def test_repo_git(tmpdir):
+ conf = config.Config()
+
+ conf.project = six.text_type(tmpdir.join("repo"))
+ conf.repo = "https://github.com/spacetelescope/asv.git"
+ _test_generic_repo(conf)
+
+
+ def test_repo_hg(tmpdir):
+ conf = config.Config()
+
+ conf.project = six.text_type(tmpdir.join("repo"))
+ conf.repo = "hg+https://bitbucket.org/nds-org/nds-labs"
+ _test_generic_repo(conf, hash_range="a8ca24ac6b77:9dc758deba8",
+ master="tip", branch="dev")
|
685365af5126c6e83db468eef24b008fc1526462
|
tools/game_utils.py
|
tools/game_utils.py
|
import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
|
import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
|
Add method to verify that all strategy probabilities add to 1
|
Add method to verify that all strategy probabilities add to 1
|
Python
|
mit
|
JakubPetriska/poker-cfr,JakubPetriska/poker-cfr
|
+ import numpy as np
import scipy.misc
import scipy.special
+
+ from tools.walk_tree import walk_tree
+ from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
+
+ def is_correct_strategy(strategy_tree):
+ correct = True
+ def on_node(node):
+ if isinstance(node, ActionNode):
+ nonlocal correct
+ strategy_sum = np.sum(node.strategy)
+ if strategy_sum != 1:
+ correct = False
+ walk_tree(strategy_tree, on_node)
+ return correct
+
|
Add method to verify that all strategy probabilities add to 1
|
## Code Before:
import scipy.misc
import scipy.special
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
## Instruction:
Add method to verify that all strategy probabilities add to 1
## Code After:
import numpy as np
import scipy.misc
import scipy.special
from tools.walk_tree import walk_tree
from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
def is_correct_strategy(strategy_tree):
correct = True
def on_node(node):
if isinstance(node, ActionNode):
nonlocal correct
strategy_sum = np.sum(node.strategy)
if strategy_sum != 1:
correct = False
walk_tree(strategy_tree, on_node)
return correct
|
+ import numpy as np
import scipy.misc
import scipy.special
+
+ from tools.walk_tree import walk_tree
+ from tools.game_tree.nodes import ActionNode
def get_num_hole_card_combinations(game):
num_players = game.get_num_players()
num_hole_cards = game.get_num_hole_cards()
num_cards = game.get_num_suits() * game.get_num_ranks()
num_total_hole_cards = num_players * num_hole_cards
return scipy.misc.comb(num_cards, num_total_hole_cards, exact=True) \
* scipy.special.perm(num_total_hole_cards, num_total_hole_cards, exact=True)
+
+
+ def is_correct_strategy(strategy_tree):
+ correct = True
+ def on_node(node):
+ if isinstance(node, ActionNode):
+ nonlocal correct
+ strategy_sum = np.sum(node.strategy)
+ if strategy_sum != 1:
+ correct = False
+ walk_tree(strategy_tree, on_node)
+ return correct
|
c8422778e31888cbc02dc764af875114916e5f88
|
smsviewer/views.py
|
smsviewer/views.py
|
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
return {"messages": sorted([e for e in root if e.get("contact_name") == "Lacey Shankle"], key=lambda message: message.get("date"))}
return {'project': 'smsviewer'}
|
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
els = root.xpath("*[@contact_name='Lacey Shankle']")
return {"messages": sorted(els, key=lambda message: message.get("date"))}
|
Use xpath to find messages instead of a loop
|
Use xpath to find messages instead of a loop
|
Python
|
mit
|
spiffytech/smsviewer,spiffytech/smsviewer
|
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
+ els = root.xpath("*[@contact_name='Lacey Shankle']")
- return {"messages": sorted([e for e in root if e.get("contact_name") == "Lacey Shankle"], key=lambda message: message.get("date"))}
- return {'project': 'smsviewer'}
+ return {"messages": sorted(els, key=lambda message: message.get("date"))}
+
|
Use xpath to find messages instead of a loop
|
## Code Before:
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
return {"messages": sorted([e for e in root if e.get("contact_name") == "Lacey Shankle"], key=lambda message: message.get("date"))}
return {'project': 'smsviewer'}
## Instruction:
Use xpath to find messages instead of a loop
## Code After:
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
els = root.xpath("*[@contact_name='Lacey Shankle']")
return {"messages": sorted(els, key=lambda message: message.get("date"))}
|
from pyramid.view import view_config
from lxml import etree
@view_config(route_name='index', renderer='index.mako')
def my_view(request):
smsfile = "sms.xml"
with open(smsfile) as f:
tree = etree.parse(f)
root = tree.getroot()
- return {"messages": sorted([e for e in root if e.get("contact_name") == "Lacey Shankle"], key=lambda message: message.get("date"))}
- return {'project': 'smsviewer'}
+ els = root.xpath("*[@contact_name='Lacey Shankle']")
+
+ return {"messages": sorted(els, key=lambda message: message.get("date"))}
|
9014dfde50d5f54cf79c544ce01e81266effa87d
|
game_info/tests/test_commands.py
|
game_info/tests/test_commands.py
|
from django.core.management import call_command
from django.test import TestCase
class ServerTest(TestCase):
def test_update_game_info(self):
call_command('update_game_info')
|
from django.core.management import call_command
from django.test import TestCase
from game_info.models import Server
class ServerTest(TestCase):
def create_server(self, title="Test Server", host="example.org", port=27015):
return Server.objects.create(title=title, host=host, port=port)
def test_update_game_info(self):
self.create_server().save()
call_command('update_game_info')
|
Fix testing on update_game_info management command
|
Fix testing on update_game_info management command
|
Python
|
bsd-3-clause
|
Azelphur-Servers/django-game-info
|
from django.core.management import call_command
from django.test import TestCase
+ from game_info.models import Server
class ServerTest(TestCase):
+ def create_server(self, title="Test Server", host="example.org", port=27015):
+ return Server.objects.create(title=title, host=host, port=port)
+
def test_update_game_info(self):
+ self.create_server().save()
call_command('update_game_info')
|
Fix testing on update_game_info management command
|
## Code Before:
from django.core.management import call_command
from django.test import TestCase
class ServerTest(TestCase):
def test_update_game_info(self):
call_command('update_game_info')
## Instruction:
Fix testing on update_game_info management command
## Code After:
from django.core.management import call_command
from django.test import TestCase
from game_info.models import Server
class ServerTest(TestCase):
def create_server(self, title="Test Server", host="example.org", port=27015):
return Server.objects.create(title=title, host=host, port=port)
def test_update_game_info(self):
self.create_server().save()
call_command('update_game_info')
|
from django.core.management import call_command
from django.test import TestCase
+ from game_info.models import Server
class ServerTest(TestCase):
+ def create_server(self, title="Test Server", host="example.org", port=27015):
+ return Server.objects.create(title=title, host=host, port=port)
+
def test_update_game_info(self):
+ self.create_server().save()
call_command('update_game_info')
|
3d5c68421b889abb4f56319b082aaff554ebaa0e
|
tests/test_parse.py
|
tests/test_parse.py
|
import json
import logging
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
with open('files/input.json') as input_file:
torrents = json.load(input_file)
with open('files/output.json') as output_file:
results = json.load(output_file)
for torrent, result in zip(torrents, results):
logging.info('Checking %s', torrent)
self.assertEqual(ptn.parse(torrent), result)
if __name__ == '__main__':
unittest.main()
|
import json
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
with open(os.path.join(
os.path.dirname(__file__),
'files/input.json'
)) as input_file:
torrents = json.load(input_file)
with open(os.path.join(
os.path.dirname(__file__),
'files/output.json'
)) as output_file:
expected_results = json.load(output_file)
for torrent, expected_result in zip(torrents, expected_results):
result = ptn.parse(torrent)
self.assertItemsEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
Fix minor syntax error in test
|
Fix minor syntax error in test
|
Python
|
mit
|
divijbindlish/parse-torrent-name,nivertech/parse-torrent-name
|
import json
- import logging
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
- with open('files/input.json') as input_file:
+ with open(os.path.join(
+ os.path.dirname(__file__),
+ 'files/input.json'
+ )) as input_file:
torrents = json.load(input_file)
- with open('files/output.json') as output_file:
+ with open(os.path.join(
+ os.path.dirname(__file__),
+ 'files/output.json'
+ )) as output_file:
- results = json.load(output_file)
+ expected_results = json.load(output_file)
- for torrent, result in zip(torrents, results):
+ for torrent, expected_result in zip(torrents, expected_results):
- logging.info('Checking %s', torrent)
- self.assertEqual(ptn.parse(torrent), result)
+ result = ptn.parse(torrent)
+ self.assertItemsEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
Fix minor syntax error in test
|
## Code Before:
import json
import logging
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
with open('files/input.json') as input_file:
torrents = json.load(input_file)
with open('files/output.json') as output_file:
results = json.load(output_file)
for torrent, result in zip(torrents, results):
logging.info('Checking %s', torrent)
self.assertEqual(ptn.parse(torrent), result)
if __name__ == '__main__':
unittest.main()
## Instruction:
Fix minor syntax error in test
## Code After:
import json
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
with open(os.path.join(
os.path.dirname(__file__),
'files/input.json'
)) as input_file:
torrents = json.load(input_file)
with open(os.path.join(
os.path.dirname(__file__),
'files/output.json'
)) as output_file:
expected_results = json.load(output_file)
for torrent, expected_result in zip(torrents, expected_results):
result = ptn.parse(torrent)
self.assertItemsEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
import json
- import logging
import os
import unittest
import ptn
class ParseTest(unittest.TestCase):
def test_parser(self):
- with open('files/input.json') as input_file:
+ with open(os.path.join(
+ os.path.dirname(__file__),
+ 'files/input.json'
+ )) as input_file:
torrents = json.load(input_file)
- with open('files/output.json') as output_file:
+ with open(os.path.join(
+ os.path.dirname(__file__),
+ 'files/output.json'
+ )) as output_file:
- results = json.load(output_file)
+ expected_results = json.load(output_file)
? +++++++++
- for torrent, result in zip(torrents, results):
+ for torrent, expected_result in zip(torrents, expected_results):
? +++++++++ +++++++++
- logging.info('Checking %s', torrent)
- self.assertEqual(ptn.parse(torrent), result)
+ result = ptn.parse(torrent)
+ self.assertItemsEqual(result, expected_result)
if __name__ == '__main__':
unittest.main()
|
71cdbeada7e11634e1168ca2e825167cbe87b4de
|
spacy/lang/de/norm_exceptions.py
|
spacy/lang/de/norm_exceptions.py
|
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string] = norm
NORM_EXCEPTIONS[string.title()] = norm
|
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string.title()] = norm
|
Revert "Also include lowercase norm exceptions"
|
Revert "Also include lowercase norm exceptions"
This reverts commit 70f4e8adf37cfcfab60be2b97d6deae949b30e9e.
|
Python
|
mit
|
aikramer2/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,explosion/spaCy
|
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
- NORM_EXCEPTIONS[string] = norm
NORM_EXCEPTIONS[string.title()] = norm
|
Revert "Also include lowercase norm exceptions"
|
## Code Before:
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string] = norm
NORM_EXCEPTIONS[string.title()] = norm
## Instruction:
Revert "Also include lowercase norm exceptions"
## Code After:
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
NORM_EXCEPTIONS[string.title()] = norm
|
from __future__ import unicode_literals
# Here we only want to include the absolute most common words. Otherwise,
# this list would get impossibly long for German – especially considering the
# old vs. new spelling rules, and all possible cases.
_exc = {
"daß": "dass"
}
NORM_EXCEPTIONS = {}
for string, norm in _exc.items():
- NORM_EXCEPTIONS[string] = norm
NORM_EXCEPTIONS[string.title()] = norm
|
87cd4025aed62d76e3c64ba939f5241307b4478f
|
CascadeCount.py
|
CascadeCount.py
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
Load from the preprocessed data
|
Load from the preprocessed data
|
Python
|
mit
|
danjamker/DiffusionSimulation
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
- MRJobNetworkX.run()
+ CascadeCount.run()
|
Load from the preprocessed data
|
## Code Before:
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
## Instruction:
Load from the preprocessed data
## Code After:
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
- MRJobNetworkX.run()
+ CascadeCount.run()
|
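The only behavioural change in the MRJob record above is the `__main__` guard, which previously invoked `MRJobNetworkX`, a class never defined in the file. (The mapper also assigns `dtf` but yields `len(dft.index)` and falls back to `StringIO.StringIO` without importing it; those issues are reproduced from the upstream file and are not part of this commit.) A minimal, self-contained sketch of the entrypoint pattern, with illustrative mapper/reducer bodies that are not taken from the commit:

```python
from mrjob.job import MRJob


class CascadeCount(MRJob):
    def mapper(self, _, line):
        # Real logic would parse the HDFS path on each input line; here we
        # simply count lines to keep the sketch runnable.
        yield "lines", 1

    def reducer(self, key, counts):
        yield key, sum(counts)


if __name__ == '__main__':
    # Run the job class defined in this module, not one copied from a template.
    CascadeCount.run()
```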
6be70d01bdf58389db2a6adc4035f82669d02a61
|
cms/plugins/googlemap/cms_plugins.py
|
cms/plugins/googlemap/cms_plugins.py
|
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from cms.plugins.googlemap import settings
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, request.LANGUAGE_CODE),))
plugin_pool.register_plugin(GoogleMapPlugin)
|
from django.conf import settings
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
lang = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, lang),))
plugin_pool.register_plugin(GoogleMapPlugin)
|
Allow use of GoogleMaps plugin without Multilingual support
|
Allow use of GoogleMaps plugin without Multilingual support
|
Python
|
bsd-3-clause
|
cyberintruder/django-cms,chmberl/django-cms,owers19856/django-cms,jproffitt/django-cms,vstoykov/django-cms,MagicSolutions/django-cms,isotoma/django-cms,jproffitt/django-cms,chkir/django-cms,stefanw/django-cms,jeffreylu9/django-cms,divio/django-cms,Vegasvikk/django-cms,jrief/django-cms,pbs/django-cms,farhaadila/django-cms,chrisglass/django-cms,kk9599/django-cms,nimbis/django-cms,rsalmaso/django-cms,vad/django-cms,rscnt/django-cms,memnonila/django-cms,intip/django-cms,MagicSolutions/django-cms,stefanfoulis/django-cms,jrclaramunt/django-cms,SinnerSchraderMobileMirrors/django-cms,selecsosi/django-cms,donce/django-cms,liuyisiyisi/django-cms,jeffreylu9/django-cms,pbs/django-cms,liuyisiyisi/django-cms,jrclaramunt/django-cms,divio/django-cms,stefanfoulis/django-cms,astagi/django-cms,datakortet/django-cms,intip/django-cms,DylannCordel/django-cms,frnhr/django-cms,adaptivelogic/django-cms,stefanw/django-cms,qnub/django-cms,timgraham/django-cms,stefanw/django-cms,memnonila/django-cms,mkoistinen/django-cms,jeffreylu9/django-cms,vad/django-cms,AlexProfi/django-cms,intgr/django-cms,ojii/django-cms,sephii/django-cms,philippze/django-cms,rscnt/django-cms,kk9599/django-cms,pbs/django-cms,jsma/django-cms,astagi/django-cms,rscnt/django-cms,czpython/django-cms,jeffreylu9/django-cms,foobacca/django-cms,foobacca/django-cms,selecsosi/django-cms,donce/django-cms,SinnerSchraderMobileMirrors/django-cms,SachaMPS/django-cms,chkir/django-cms,11craft/django-cms,youprofit/django-cms,petecummings/django-cms,wyg3958/django-cms,frnhr/django-cms,foobacca/django-cms,pixbuffer/django-cms,datakortet/django-cms,vxsx/django-cms,datakortet/django-cms,driesdesmet/django-cms,bittner/django-cms,astagi/django-cms,netzkolchose/django-cms,owers19856/django-cms,netzkolchose/django-cms,wuzhihui1123/django-cms,wyg3958/django-cms,cyberintruder/django-cms,SofiaReis/django-cms,benzkji/django-cms,nimbis/django-cms,rryan/django-cms,takeshineshiro/django-cms,robmagee/django-cms,FinalAngel/django-cms,nostalgiaz/django-cms,andyzsf/django-cms,frnhr/django-cms,adaptivelogic/django-cms,dhorelik/django-cms,iddqd1/django-cms,jsma/django-cms,frnhr/django-cms,11craft/django-cms,saintbird/django-cms,adaptivelogic/django-cms,memnonila/django-cms,jrief/django-cms,pbs/django-cms,takeshineshiro/django-cms,MagicSolutions/django-cms,Vegasvikk/django-cms,evildmp/django-cms,AlexProfi/django-cms,jsma/django-cms,divio/django-cms,rsalmaso/django-cms,isotoma/django-cms,petecummings/django-cms,vxsx/django-cms,Livefyre/django-cms,11craft/django-cms,liuyisiyisi/django-cms,jalaziz/django-cms-grappelli-old,keimlink/django-cms,czpython/django-cms,keimlink/django-cms,stefanfoulis/django-cms,petecummings/django-cms,takeshineshiro/django-cms,ScholzVolkmer/django-cms,timgraham/django-cms,chkir/django-cms,foobacca/django-cms,stefanw/django-cms,jalaziz/django-cms-grappelli-old,rryan/django-cms,ScholzVolkmer/django-cms,qnub/django-cms,benzkji/django-cms,ScholzVolkmer/django-cms,kk9599/django-cms,nimbis/django-cms,webu/django-cms,nostalgiaz/django-cms,nostalgiaz/django-cms,vad/django-cms,mkoistinen/django-cms,selecsosi/django-cms,jsma/django-cms,keimlink/django-cms,timgraham/django-cms,yakky/django-cms,isotoma/django-cms,vxsx/django-cms,jrclaramunt/django-cms,intgr/django-cms,pancentric/django-cms,sznekol/django-cms,pixbuffer/django-cms,leture/django-cms,mkoistinen/django-cms,irudayarajisawa/django-cms,czpython/django-cms,intip/django-cms,Jaccorot/django-cms,sephii/django-cms,irudayarajisawa/django-cms,jproffitt/django-cms,wuzhihui1123/django-cms,vstoykov/django-cms,rsalmaso/django
-cms,farhaadila/django-cms,vad/django-cms,SofiaReis/django-cms,360youlun/django-cms,DylannCordel/django-cms,intgr/django-cms,jrief/django-cms,leture/django-cms,SofiaReis/django-cms,yakky/django-cms,Vegasvikk/django-cms,owers19856/django-cms,mkoistinen/django-cms,saintbird/django-cms,yakky/django-cms,jrief/django-cms,robmagee/django-cms,pixbuffer/django-cms,philippze/django-cms,saintbird/django-cms,ojii/django-cms,360youlun/django-cms,webu/django-cms,sznekol/django-cms,dhorelik/django-cms,wuzhihui1123/django-cms,evildmp/django-cms,yakky/django-cms,Livefyre/django-cms,netzkolchose/django-cms,josjevv/django-cms,bittner/django-cms,rryan/django-cms,SmithsonianEnterprises/django-cms,evildmp/django-cms,farhaadila/django-cms,nimbis/django-cms,sznekol/django-cms,ojii/django-cms,sephii/django-cms,Livefyre/django-cms,rryan/django-cms,FinalAngel/django-cms,jalaziz/django-cms-grappelli-old,FinalAngel/django-cms,bittner/django-cms,SachaMPS/django-cms,webu/django-cms,pancentric/django-cms,driesdesmet/django-cms,benzkji/django-cms,divio/django-cms,isotoma/django-cms,VillageAlliance/django-cms,iddqd1/django-cms,intip/django-cms,evildmp/django-cms,chrisglass/django-cms,benzkji/django-cms,360youlun/django-cms,sephii/django-cms,pancentric/django-cms,datakortet/django-cms,dhorelik/django-cms,cyberintruder/django-cms,andyzsf/django-cms,iddqd1/django-cms,andyzsf/django-cms,SachaMPS/django-cms,czpython/django-cms,nostalgiaz/django-cms,robmagee/django-cms,vstoykov/django-cms,chmberl/django-cms,philippze/django-cms,wyg3958/django-cms,intgr/django-cms,andyzsf/django-cms,leture/django-cms,chmberl/django-cms,vxsx/django-cms,Livefyre/django-cms,FinalAngel/django-cms,DylannCordel/django-cms,josjevv/django-cms,11craft/django-cms,jproffitt/django-cms,selecsosi/django-cms,irudayarajisawa/django-cms,youprofit/django-cms,VillageAlliance/django-cms,rsalmaso/django-cms,netzkolchose/django-cms,SinnerSchraderMobileMirrors/django-cms,bittner/django-cms,VillageAlliance/django-cms,Jaccorot/django-cms,driesdesmet/django-cms,SmithsonianEnterprises/django-cms,donce/django-cms,youprofit/django-cms,AlexProfi/django-cms,wuzhihui1123/django-cms,qnub/django-cms,stefanfoulis/django-cms,Jaccorot/django-cms,josjevv/django-cms,SmithsonianEnterprises/django-cms
|
+ from django.conf import settings
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
- from cms.plugins.googlemap import settings
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
+ lang = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
- return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, request.LANGUAGE_CODE),))
+ return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, lang),))
-
+
plugin_pool.register_plugin(GoogleMapPlugin)
|
Allow use of GoogleMaps plugin without Multilingual support
|
## Code Before:
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from cms.plugins.googlemap import settings
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, request.LANGUAGE_CODE),))
plugin_pool.register_plugin(GoogleMapPlugin)
## Instruction:
Allow use of GoogleMaps plugin without Multilingual support
## Code After:
from django.conf import settings
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
lang = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, lang),))
plugin_pool.register_plugin(GoogleMapPlugin)
|
+ from django.conf import settings
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
from cms.plugins.googlemap.models import GoogleMap
from cms.plugins.googlemap.settings import GOOGLE_MAPS_API_KEY
- from cms.plugins.googlemap import settings
from django.forms.widgets import Media
class GoogleMapPlugin(CMSPluginBase):
model = GoogleMap
name = _("Google Map")
render_template = "cms/plugins/googlemap.html"
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder,
})
return context
def get_plugin_media(self, request, context, plugin):
if 'GOOGLE_MAPS_API_KEY' in context:
key = context['GOOGLE_MAPS_API_KEY']
else:
key = GOOGLE_MAPS_API_KEY
+ lang = getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
- return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, request.LANGUAGE_CODE),))
? ^^^^^^^^^^^^^^^^^^^^^
+ return Media(js = ('http://maps.google.com/maps?file=api&v=2&key=%s&hl=%s' % (key, lang),))
? ^^^^
-
+
plugin_pool.register_plugin(GoogleMapPlugin)
|
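The fix above works because `request.LANGUAGE_CODE` only exists when Django's locale middleware (i.e. multilingual support) is enabled. A minimal sketch of the fallback pattern on its own, assuming a standard settings module:

```python
from django.conf import settings


def current_language(request):
    # LocaleMiddleware sets request.LANGUAGE_CODE; without it, fall back to the
    # first two characters of the site-wide default (e.g. 'en' from 'en-us').
    return getattr(request, 'LANGUAGE_CODE', settings.LANGUAGE_CODE[0:2])
```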
9fcf408dad5b97094445677eb42429beaa830c22
|
apps/homepage/templatetags/homepage_tags.py
|
apps/homepage/templatetags/homepage_tags.py
|
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
context['tabs'] = Tab.objects.all()
return ''
|
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
context['tabs'] = Tab.objects.all().select_related('grid')
return ''
|
Reduce queries on all pages by using select_related in the get_tabs template tag.
|
Reduce queries on all pages by using select_related in the get_tabs template tag.
|
Python
|
mit
|
benracine/opencomparison,audreyr/opencomparison,QLGu/djangopackages,cartwheelweb/packaginator,QLGu/djangopackages,miketheman/opencomparison,nanuxbe/djangopackages,QLGu/djangopackages,miketheman/opencomparison,nanuxbe/djangopackages,audreyr/opencomparison,cartwheelweb/packaginator,nanuxbe/djangopackages,cartwheelweb/packaginator,pydanny/djangopackages,pydanny/djangopackages,pydanny/djangopackages,benracine/opencomparison
|
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
- context['tabs'] = Tab.objects.all()
+ context['tabs'] = Tab.objects.all().select_related('grid')
return ''
|
Reduce queries on all pages by using select_related in the get_tabs template tag.
|
## Code Before:
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
context['tabs'] = Tab.objects.all()
return ''
## Instruction:
Reduce queries on all pages by using select_related in the get_tabs template tag.
## Code After:
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
context['tabs'] = Tab.objects.all().select_related('grid')
return ''
|
from django import template
from homepage.models import Tab
register = template.Library()
@register.tag(name="get_tabs")
def get_tabs(parser, token):
return GetElementNode()
class GetElementNode(template.Node):
def __init__(self):
pass
def render(self, context):
- context['tabs'] = Tab.objects.all()
+ context['tabs'] = Tab.objects.all().select_related('grid')
? +++++++++++++++++++++++
return ''
|
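For context on why a single `select_related` call reduces queries on every page: each `tab.grid` access in the template otherwise issues its own query (the classic N+1 pattern), whereas `select_related('grid')` pulls the related rows in one JOINed query. A sketch, assuming `Tab.grid` is the foreign key implied above:

```python
from homepage.models import Tab

# Without select_related: one query for the tabs, then one more per tab
# whenever tab.grid is touched in the template.
tabs_lazy = Tab.objects.all()

# With select_related: a single JOINed query covers Tab and its grid.
tabs_joined = Tab.objects.all().select_related('grid')
```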
b79ed827f7211efbcdef95286bf2d4113d6e8b88
|
posts/views.py
|
posts/views.py
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return Entry.objects.filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return super().get_queryset().filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
Fix ordering of category view
|
Fix ordering of category view
Signed-off-by: Michal Čihař <[email protected]>
|
Python
|
agpl-3.0
|
nijel/photoblog,nijel/photoblog
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
- return Entry.objects.filter(category=self.kwargs['category'])
+ return super().get_queryset().filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
Fix ordering of category view
|
## Code Before:
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return Entry.objects.filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
## Instruction:
Fix ordering of category view
## Code After:
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
return super().get_queryset().filter(category=self.kwargs['category'])
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
from django.shortcuts import get_object_or_404
from django.views.generic.dates import ArchiveIndexView
from django.views.generic.edit import FormView
from .models import Entry, Category
from .forms import ContactForm
class CategoryView(ArchiveIndexView):
model = Entry
date_field = 'date'
paginate_by = 20
template_name = 'posts/entry_category.html'
def get(self, request, slug, **kwargs):
self.kwargs['category'] = get_object_or_404(Category, slug=slug)
return super().get(request, kwargs)
def get_queryset(self):
- return Entry.objects.filter(category=self.kwargs['category'])
? ^^ ^^^^ - ^
+ return super().get_queryset().filter(category=self.kwargs['category'])
? ^^^^^^^^^^ ++++ ^ ^^
def get_context_data(self, **kwargs):
result = super().get_context_data(**kwargs)
result['category'] = self.kwargs['category']
return result
class ContactView(FormView):
template_name = 'contact.html'
form_class = ContactForm
success_url = '/kontakt/'
def form_valid(self, form):
# This method is called when valid form data has been POSTed.
# It should return an HttpResponse.
form.send_email()
return super().form_valid(form)
|
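The subject mentions ordering because `ArchiveIndexView.get_queryset()` applies the view's date-based ordering (newest first on `date_field`), which the old `Entry.objects.filter(...)` call silently dropped. Chaining onto `super().get_queryset()` is roughly equivalent to the explicit query sketched below (`category` stands in for `self.kwargs['category']`; not taken from the commit):

```python
from posts.models import Entry


def entries_for(category):
    # Category filter plus the archive view's newest-first ordering on 'date'.
    return Entry.objects.filter(category=category).order_by('-date')
```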
2f3139b2dfa2662daa7e57b221836ff2923c5fc9
|
actstream/admin.py
|
actstream/admin.py
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
Add 'public' field to ActionAdmin list display
|
Add 'public' field to ActionAdmin list display
|
Python
|
mit
|
druss16/danslist,Shanto/django-activity-stream,jimlyndon/django-activity-stream,intelivix/django-activity-stream,pombredanne/django-activity-stream,github-account-because-they-want-it/django-activity-stream,thelabnyc/django-activity-stream,github-account-because-they-want-it/django-activity-stream,pknowles/django-activity-stream,druss16/danslist,justquick/django-activity-stream,pombredanne/django-activity-stream,druss16/danslist,Shanto/django-activity-stream,jimlyndon/django-activity-stream,pknowles/django-activity-stream,jrsupplee/django-activity-stream,jrsupplee/django-activity-stream,intelivix/django-activity-stream,thelabnyc/django-activity-stream,justquick/django-activity-stream
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
- list_display = ('__str__', 'actor', 'verb', 'target')
+ list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
Add 'public' field to ActionAdmin list display
|
## Code Before:
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
## Instruction:
Add 'public' field to ActionAdmin list display
## Code After:
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
list_display = ('__str__', 'actor', 'verb', 'target', 'public')
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
from django.contrib import admin
from actstream import models
# Use django-generic-admin widgets if available
try:
from genericadmin.admin import GenericAdminModelAdmin as ModelAdmin
except ImportError:
ModelAdmin = admin.ModelAdmin
class ActionAdmin(ModelAdmin):
date_hierarchy = 'timestamp'
- list_display = ('__str__', 'actor', 'verb', 'target')
+ list_display = ('__str__', 'actor', 'verb', 'target', 'public')
? ++++++++++
list_editable = ('verb',)
list_filter = ('timestamp',)
raw_id_fields = ('actor_content_type', 'target_content_type',
'action_object_content_type')
class FollowAdmin(ModelAdmin):
list_display = ('__str__', 'user', 'follow_object', 'actor_only', 'started')
list_editable = ('user',)
list_filter = ('user', 'started',)
raw_id_fields = ('user', 'content_type')
admin.site.register(models.Action, ActionAdmin)
admin.site.register(models.Follow, FollowAdmin)
|
494e7ff2e249a8202c8a71172be7f1870f56f9c3
|
mcavatar/views/public/__init__.py
|
mcavatar/views/public/__init__.py
|
from flask import Blueprint
public = Blueprint('public', __name__, template_folder='templates')
@public.route('/')
def index():
return 'Hello World'
|
from flask import Blueprint
public = Blueprint('public', __name__)
@public.route('/')
def index():
return 'Hello World'
|
Remove blueprint specific template directories.
|
Remove blueprint specific template directories.
|
Python
|
mit
|
joealcorn/MCAvatar
|
from flask import Blueprint
- public = Blueprint('public', __name__, template_folder='templates')
+ public = Blueprint('public', __name__)
@public.route('/')
def index():
return 'Hello World'
|
Remove blueprint specific template directories.
|
## Code Before:
from flask import Blueprint
public = Blueprint('public', __name__, template_folder='templates')
@public.route('/')
def index():
return 'Hello World'
## Instruction:
Remove blueprint specific template directories.
## Code After:
from flask import Blueprint
public = Blueprint('public', __name__)
@public.route('/')
def index():
return 'Hello World'
|
from flask import Blueprint
- public = Blueprint('public', __name__, template_folder='templates')
+ public = Blueprint('public', __name__)
@public.route('/')
def index():
return 'Hello World'
|
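With the blueprint-specific `template_folder` removed, Jinja templates for these views resolve from the application-level template directory instead. A minimal sketch of the wiring, with the module layout assumed from the file path rather than shown in the commit:

```python
from flask import Flask

from mcavatar.views.public import public  # the blueprint defined above

# Without a per-blueprint template_folder, render_template() falls back to the
# application's own "templates" directory.
app = Flask(__name__)
app.register_blueprint(public)
```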
c30898d785d131a8dc08d93fe4142acda5b34081
|
frappe/core/doctype/docfield/docfield.py
|
frappe/core/doctype/docfield/docfield.py
|
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
|
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
Add get_link_doctype method in DocField
|
fix: Add get_link_doctype method in DocField
|
Python
|
mit
|
adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,mhbu50/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,vjFaLk/frappe,adityahase/frappe,vjFaLk/frappe,vjFaLk/frappe,frappe/frappe,vjFaLk/frappe,saurabh6790/frappe,StrellaGroup/frappe,StrellaGroup/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,frappe/frappe,mhbu50/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,mhbu50/frappe,saurabh6790/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,adityahase/frappe,yashodhank/frappe
|
from __future__ import unicode_literals
+ import frappe
from frappe.model.document import Document
class DocField(Document):
- pass
+ def get_link_doctype(self):
+ '''Returns the Link doctype for the docfield (if applicable)
+ if fieldtype is Link: Returns "options"
+ if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
+ '''
+ if self.fieldtype == 'Link':
+ return self.options
+ if self.fieldtype == 'Table MultiSelect':
+ table_doctype = self.options
+
+ link_doctype = frappe.db.get_value('DocField', {
+ 'fieldtype': 'Link',
+ 'parenttype': 'DocType',
+ 'parent': table_doctype,
+ 'in_list_view': 1
+ }, 'options')
+
+ return link_doctype
+
|
Add get_link_doctype method in DocField
|
## Code Before:
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
## Instruction:
Add get_link_doctype method in DocField
## Code After:
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
from __future__ import unicode_literals
+ import frappe
from frappe.model.document import Document
class DocField(Document):
- pass
+ def get_link_doctype(self):
+ '''Returns the Link doctype for the docfield (if applicable)
+ if fieldtype is Link: Returns "options"
+ if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
+ '''
+ if self.fieldtype == 'Link':
+ return self.options
+
+ if self.fieldtype == 'Table MultiSelect':
+ table_doctype = self.options
+
+ link_doctype = frappe.db.get_value('DocField', {
+ 'fieldtype': 'Link',
+ 'parenttype': 'DocType',
+ 'parent': table_doctype,
+ 'in_list_view': 1
+ }, 'options')
+
+ return link_doctype
|
80fa2f3c47ddc845d4dc9e549df38f68267873d6
|
corehq/ex-submodules/pillow_retry/tasks.py
|
corehq/ex-submodules/pillow_retry/tasks.py
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
Send error-type info to pillow error DD metrics
|
Send error-type info to pillow error DD metrics
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
- 'group:celery'
+ 'group:celery',
+ 'error_type:%s' % row['error_type']
])
|
Send error-type info to pillow error DD metrics
|
## Code Before:
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
## Instruction:
Send error-type info to pillow error DD metrics
## Code After:
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
- 'group:celery'
+ 'group:celery',
? +
+ 'error_type:%s' % row['error_type']
])
|
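One caveat worth flagging in the record above: the queryset still groups only on `values('pillow')`, so each `row` contains just `pillow` and `num_errors`, and `row['error_type']` would raise a `KeyError` as written. A sketch of a queryset that actually exposes the field, assuming `PillowError` has an `error_type` column as the new tag implies:

```python
from django.db.models import Count

from pillow_retry.models import PillowError

# Group by pillow *and* error type so both keys appear on every row.
data = (
    PillowError.objects
    .values('pillow', 'error_type')
    .annotate(num_errors=Count('id'))
)
```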
be9d58ffcf23e4fb47d2c09e869368ab9ec738c9
|
localore/localore/embeds.py
|
localore/localore/embeds.py
|
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
.replace('width="500"', 'width="100%"')
.replace('height="500"', 'height="166"')
)
embed_dict['width'] = '100%'
embed_dict['height'] = '166'
return embed_dict
|
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
.replace('width="%s"' % embed_dict['width'], 'width="100%"')
.replace('height="%s"' % embed_dict['height'], 'height="166"')
)
embed_dict['width'] = None
embed_dict['height'] = 166
return embed_dict
|
Fix SoundCloud embed width/height replacement.
|
Fix SoundCloud embed width/height replacement.
SoundCloud embeds aren't always 500x500.
Also, don't set the "width" embed dict key to '100%':
"width"/"height" keys expect integers only.
|
Python
|
mpl-2.0
|
ghostwords/localore,ghostwords/localore,ghostwords/localore
|
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
- .replace('width="500"', 'width="100%"')
+ .replace('width="%s"' % embed_dict['width'], 'width="100%"')
- .replace('height="500"', 'height="166"')
+ .replace('height="%s"' % embed_dict['height'], 'height="166"')
)
- embed_dict['width'] = '100%'
+ embed_dict['width'] = None
- embed_dict['height'] = '166'
+ embed_dict['height'] = 166
return embed_dict
|
Fix SoundCloud embed width/height replacement.
|
## Code Before:
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
.replace('width="500"', 'width="100%"')
.replace('height="500"', 'height="166"')
)
embed_dict['width'] = '100%'
embed_dict['height'] = '166'
return embed_dict
## Instruction:
Fix SoundCloud embed width/height replacement.
## Code After:
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
.replace('width="%s"' % embed_dict['width'], 'width="100%"')
.replace('height="%s"' % embed_dict['height'], 'height="166"')
)
embed_dict['width'] = None
embed_dict['height'] = 166
return embed_dict
|
from urllib.parse import urlparse
from django.conf import settings
from wagtail.wagtailembeds.finders.embedly import embedly
from wagtail.wagtailembeds.finders.oembed import oembed
def get_default_finder():
if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'):
return embedly
return oembed
def finder(url, max_width=None):
domain = urlparse(url).netloc
# work around Embedly missing embedding HTML for Twitter and Instagram URLs
if domain.endswith((
'instagram.com',
'twitter.com',
)):
return oembed(url, max_width)
embed_dict = get_default_finder()(url, max_width)
if domain.endswith('soundcloud.com'):
embed_dict['html'] = (
embed_dict['html']
.replace('visual%3Dtrue', 'visual%3Dfalse')
- .replace('width="500"', 'width="100%"')
? ^^^
+ .replace('width="%s"' % embed_dict['width'], 'width="100%"')
? ^^ ++++++++++++++++++++++
- .replace('height="500"', 'height="166"')
? ^^^
+ .replace('height="%s"' % embed_dict['height'], 'height="166"')
? ^^ +++++++++++++++++++++++
)
- embed_dict['width'] = '100%'
? ^^^^^^
+ embed_dict['width'] = None
? ^^^^
- embed_dict['height'] = '166'
? - -
+ embed_dict['height'] = 166
return embed_dict
|
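The replacement now keys off whatever dimensions the embed provider actually reported instead of a hard-coded 500, and the dict keeps integer-or-None dimensions rather than the string `'100%'`. A standalone illustration with made-up values, independent of Wagtail:

```python
# Illustrative embed dict; the real one comes from the oEmbed/Embedly finder.
embed = {
    'html': '<iframe width="450" height="450" src="https://w.soundcloud.com/player/"></iframe>',
    'width': 450,
    'height': 450,
}

embed['html'] = (
    embed['html']
    .replace('width="%s"' % embed['width'], 'width="100%"')
    .replace('height="%s"' % embed['height'], 'height="166"')
)
embed['width'] = None   # dimensions are kept as integers or None, not '100%'
embed['height'] = 166
```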
bcc206b46c089ea7f7ea5dfbc5c8b11a1fe72447
|
movie_time_app/models.py
|
movie_time_app/models.py
|
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
def __str__(self):
return self.title
class LikedOrNot(models.Model):
movie = models.ForeignKey(Movie)
liked_or_not = models.SmallIntegerField(null=True, blank=True,
choices=((-1, "bad"), (0, "alright"), (1, "liked")))
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
|
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
liked_or_not = models.NullBooleanField(null=True, blank=True)
def __str__(self):
return self.title
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
|
Add self-rating flag for movies. Removed LikedOrNot table
|
Add self-rating flag for movies. Removed LikedOrNot table
|
Python
|
mit
|
osama-haggag/movie-time,osama-haggag/movie-time
|
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
+ liked_or_not = models.NullBooleanField(null=True, blank=True)
def __str__(self):
return self.title
-
-
- class LikedOrNot(models.Model):
- movie = models.ForeignKey(Movie)
- liked_or_not = models.SmallIntegerField(null=True, blank=True,
- choices=((-1, "bad"), (0, "alright"), (1, "liked")))
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
|
Add self-rating flag for movies. Removed LikedOrNot table
|
## Code Before:
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
def __str__(self):
return self.title
class LikedOrNot(models.Model):
movie = models.ForeignKey(Movie)
liked_or_not = models.SmallIntegerField(null=True, blank=True,
choices=((-1, "bad"), (0, "alright"), (1, "liked")))
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
## Instruction:
Add self-rating flag for movies. Removed LikedOrNot table
## Code After:
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
liked_or_not = models.NullBooleanField(null=True, blank=True)
def __str__(self):
return self.title
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
|
from django.db import models
# Create your models here.
class Movie(models.Model):
movie_id = models.IntegerField(primary_key=True)
title = models.CharField(max_length=200)
poster = models.ImageField(null=True, blank=True)
year = models.IntegerField(null=True)
genres = models.CharField(max_length=200)
num_ratings = models.IntegerField(null=True)
rating_median = models.FloatField(null=True)
rating_mean = models.FloatField(null=True)
relatable = models.BooleanField(default=True)
+ liked_or_not = models.NullBooleanField(null=True, blank=True)
def __str__(self):
return self.title
-
-
- class LikedOrNot(models.Model):
- movie = models.ForeignKey(Movie)
- liked_or_not = models.SmallIntegerField(null=True, blank=True,
- choices=((-1, "bad"), (0, "alright"), (1, "liked")))
class Similarity(models.Model):
first_movie = models.ForeignKey(Movie, related_name='first_movie')
second_movie = models.ForeignKey(Movie, related_name='second_movie')
similarity_score = models.FloatField()
class Tag(models.Model):
movie = models.ForeignKey(Movie)
tag = models.CharField(max_length=50)
relevance = models.FloatField()
class OnlineLink(models.Model):
movie = models.ForeignKey(Movie)
imdb_id = models.CharField(max_length=50)
|
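The change folds the old three-way integer rating (-1/0/1) into a nullable boolean (liked / not liked / unrated). On Django 3.1 and later `NullBooleanField` is deprecated, so the modern equivalent of the new field would look like the sketch below (field name kept, the rest illustrative):

```python
from django.db import models


class Movie(models.Model):
    # Same three states as NullBooleanField: True (liked), False (not liked),
    # NULL (no rating yet).
    liked_or_not = models.BooleanField(null=True, blank=True)
```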
17f3a2a491f8e4d1b1b6c2644a1642f02cfada17
|
apps/i4p_base/views.py
|
apps/i4p_base/views.py
|
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
if not data :
data = QueryDict('best_of=on')
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
|
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
|
Remove pre-selection of best-on filter
|
Remove pre-selection of best-on filter
|
Python
|
agpl-3.0
|
ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople,ImaginationForPeople/imaginationforpeople
|
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
- if not data :
- data = QueryDict('best_of=on')
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
|
Remove pre-selection of best-on filter
|
## Code Before:
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
if not data :
data = QueryDict('best_of=on')
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
## Instruction:
Remove pre-selection of best-on filter
## Code After:
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
|
from django.http import QueryDict
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation
from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
def homepage(request):
"""
I4P Homepage
"""
project_sheets = I4pProject.objects.filter(best_of=True).order_by('?')[:14]
project_translations = get_project_translations_from_parents(project_sheets,
language_code=translation.get_language()
)
data = request.GET
- if not data :
- data = QueryDict('best_of=on')
context = {'project_sheets': project_sheets,
'project_translations': project_translations,
'about_tab_selected' : True}
filter_forms, extra_context = build_filters_and_context(data)
context.update(filter_forms)
context.update(extra_context)
return render_to_response(template_name='homepage.html',
dictionary=context,
context_instance=RequestContext(request)
)
|
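With the `best_of=on` default removed, `QueryDict` is no longer referenced anywhere in the view, so the import block could be trimmed as sketched below (modules as in the record):

```python
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.utils import translation

from apps.project_sheet.models import I4pProject
from apps.project_sheet.utils import get_project_translations_from_parents, build_filters_and_context
```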